diff --git a/stf/.DS_Store b/stf/.DS_Store deleted file mode 100644 index 0986e54aae0cca21d730a9a94a5ab8e5718f1f8a..0000000000000000000000000000000000000000 Binary files a/stf/.DS_Store and /dev/null differ diff --git a/stf/089.npz b/stf/089.npz deleted file mode 100644 index 4fb9cd861ba673267d9d39585b607b409d968031..0000000000000000000000000000000000000000 --- a/stf/089.npz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9ce3fb07d8d15495eab879b47413c6b86bce114ca9ecd375b45b54777cf0e175 -size 522605028 diff --git a/stf/089.pth b/stf/089.pth deleted file mode 100644 index d665222f1d736538815ee5eb164b4044a3554328..0000000000000000000000000000000000000000 --- a/stf/089.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ba4eb3437019d77abed141d60bcb5489b664f494cf965eec0bccf304c3d79b2a -size 1567401123 diff --git a/stf/TEMP/00000.jpg b/stf/TEMP/00000.jpg deleted file mode 100644 index 7e2fad33b198d0b4277701185c7e5e8e4d4fc2c4..0000000000000000000000000000000000000000 --- a/stf/TEMP/00000.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7a15d0dd1f46b4a1fddc0427064f5acb624d54011f5628a01af9351ba38e2f72 -size 130 diff --git a/stf/TEMP/00001.jpg b/stf/TEMP/00001.jpg deleted file mode 100644 index 34566b44a7b40bfe820b6c16de4a340b81b2fd5f..0000000000000000000000000000000000000000 --- a/stf/TEMP/00001.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a5f30cce1b3e6db6404236f523204b89db472d56eaed98ff2e905716cc2e0986 -size 130 diff --git a/stf/TEMP/00002.jpg b/stf/TEMP/00002.jpg deleted file mode 100644 index 63b259ee51b0711d5a13d88c328495ce469492bb..0000000000000000000000000000000000000000 --- a/stf/TEMP/00002.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:05bd8bd6f328d2b5708e7eef88ff8e6bb07bcf328b9f7126088a8c1f6a2e98be -size 130 diff --git a/stf/TEMP/00003.jpg b/stf/TEMP/00003.jpg deleted file mode 100644 index c1297f0c9f6461861501ad62fd4269330a2a0916..0000000000000000000000000000000000000000 --- a/stf/TEMP/00003.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:128147af73f0df54931b9f9c2ed0f4c223f7bd3cec5bad863c16bfe97d1cd346 -size 130 diff --git a/stf/TEMP/00004.jpg b/stf/TEMP/00004.jpg deleted file mode 100644 index f12725a1cd1fd3caec86abc73202c82671d3005e..0000000000000000000000000000000000000000 --- a/stf/TEMP/00004.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e67e733bdf9aae10ecef4a4781e340c49eae5966c799f4f274593c7ec7f314a8 -size 130 diff --git a/stf/TEMP/00005.jpg b/stf/TEMP/00005.jpg deleted file mode 100644 index d5ec35a33cf1d146cdbedcc659d48a4b3cbd3517..0000000000000000000000000000000000000000 --- a/stf/TEMP/00005.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:90aeee53c04010a69c04b25fa3a79c887cebdc84c8e820789b77f493d20bdc1b -size 130 diff --git a/stf/TEMP/00006.jpg b/stf/TEMP/00006.jpg deleted file mode 100644 index aeee57868cdb43590344334640e4055400957935..0000000000000000000000000000000000000000 --- a/stf/TEMP/00006.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:be53fb5f446d104b271f31432b166ba80b92158cf59c67c94b183739b8a0e9c9 -size 130 diff --git a/stf/TEMP/00007.jpg b/stf/TEMP/00007.jpg deleted file mode 100644 index e7773fb7b8e929d37dbead900f355f192594b51c..0000000000000000000000000000000000000000 --- a/stf/TEMP/00007.jpg +++ 
/dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b56951d181d68d7434ab7ce7c673c0e82d4104432965e26348db2e84a9aa502b -size 130 diff --git a/stf/TEMP/00008.jpg b/stf/TEMP/00008.jpg deleted file mode 100644 index bad61c7e0fdbaff3177fdd10dd07b63c0c0a7f00..0000000000000000000000000000000000000000 --- a/stf/TEMP/00008.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4fb296bb6c34ab594819a12537fff77a7affa332968852ec8a2d8b1a409a24df -size 130 diff --git a/stf/TEMP/00009.jpg b/stf/TEMP/00009.jpg deleted file mode 100644 index 711da541faf5169c22bbef485fea4b547c77b7f6..0000000000000000000000000000000000000000 --- a/stf/TEMP/00009.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:54c457f7cecf5becc79a42f1cf45b5b77b1fdfea23015ae21f27b7c95ea91186 -size 130 diff --git a/stf/TEMP/00010.jpg b/stf/TEMP/00010.jpg deleted file mode 100644 index 7bd17fba6288322e993cb420d88cdca631a892aa..0000000000000000000000000000000000000000 --- a/stf/TEMP/00010.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:64e21ee3522a80fb446414c79ed179d651a9d5e3ef3c0175301d8a97d14d243b -size 130 diff --git a/stf/TEMP/00011.jpg b/stf/TEMP/00011.jpg deleted file mode 100644 index 5834c80e42ff12aefc627c9a25c7621b461fba92..0000000000000000000000000000000000000000 --- a/stf/TEMP/00011.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:22b58ddfef79c4bdb9f95cc2f3e7d1e695e39f9bf8b1e2a734afa41fea0d5901 -size 130 diff --git a/stf/TEMP/00012.jpg b/stf/TEMP/00012.jpg deleted file mode 100644 index b5fc205c9a7e034425676e3555c9d2ddb10f5085..0000000000000000000000000000000000000000 --- a/stf/TEMP/00012.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dac621627a4a017edb94fa978a8a1a758756c4af72b39c64e6eaf3abed1d3e9e -size 130 diff --git a/stf/TEMP/00013.jpg b/stf/TEMP/00013.jpg deleted file mode 100644 index ea635476029c98e2e5dfdea8f9c7bccd232298c9..0000000000000000000000000000000000000000 --- a/stf/TEMP/00013.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7a12b0e9731983355b8f0964da2ef61a75179607931d207d7505e04f06803e43 -size 130 diff --git a/stf/TEMP/00014.jpg b/stf/TEMP/00014.jpg deleted file mode 100644 index 36d64b2927a38d378e87c5f0f9ee28517baf7d8d..0000000000000000000000000000000000000000 --- a/stf/TEMP/00014.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c247799dddde08f80bd96ccb9858e68c8e206b569700b46ce380e8aaa0c41cac -size 130 diff --git a/stf/TEMP/00015.jpg b/stf/TEMP/00015.jpg deleted file mode 100644 index fe0cf29d3b82b8335231d660c29e100bdd2baaf5..0000000000000000000000000000000000000000 --- a/stf/TEMP/00015.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:17d3e9e1dc671a58baedda45a605a2eb989760af46123bed38c55e85afe4aace -size 130 diff --git a/stf/TEMP/00016.jpg b/stf/TEMP/00016.jpg deleted file mode 100644 index 3bd082ecdf8f35ed9db170884b039b35cf42356a..0000000000000000000000000000000000000000 --- a/stf/TEMP/00016.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:691ee72d541227bd677c0a5b4de88a37f17d845818599a0a5d47a83c9671dc8c -size 130 diff --git a/stf/TEMP/00017.jpg b/stf/TEMP/00017.jpg deleted file mode 100644 index 8a43de8dde9a90bd6569b74d109ff0bc575f29e6..0000000000000000000000000000000000000000 --- a/stf/TEMP/00017.jpg +++ /dev/null @@ -1,3 +0,0 
@@ -version https://git-lfs.github.com/spec/v1 -oid sha256:03d857d106f535b7b6c2e47534b3e9e5aeed08c892ae7864a3a17c2e494eda8e -size 130 diff --git a/stf/TEMP/00018.jpg b/stf/TEMP/00018.jpg deleted file mode 100644 index fee20ce55a0be02051598755f0be7735cd76cc17..0000000000000000000000000000000000000000 --- a/stf/TEMP/00018.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dd45c1c63011717edf41cb18146b6419441905472dc01700144fc3b3890de844 -size 130 diff --git a/stf/TEMP/00019.jpg b/stf/TEMP/00019.jpg deleted file mode 100644 index 4aa5476f72109157d6ccabe62b2c37c6627580b7..0000000000000000000000000000000000000000 --- a/stf/TEMP/00019.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4030e160a196cc3d086bb34e7a80a67a778b49235b3f7e7c1a03566399344365 -size 130 diff --git a/stf/TEMP/00020.jpg b/stf/TEMP/00020.jpg deleted file mode 100644 index 6c58d4f3951bc65a024f177370511546126afc99..0000000000000000000000000000000000000000 --- a/stf/TEMP/00020.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6fe59fa522a5f99c35fc8b1b03fc2e806f5b9b9a4c849da8fdaeacc78c350435 -size 130 diff --git a/stf/TEMP/00021.jpg b/stf/TEMP/00021.jpg deleted file mode 100644 index dd7e737c56db0b77951822167190fdaeebb0483f..0000000000000000000000000000000000000000 --- a/stf/TEMP/00021.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a453230f87d05edf2529072ae4de97168705e2d9aa7a976bb45aabd3b3a7839b -size 130 diff --git a/stf/TEMP/00022.jpg b/stf/TEMP/00022.jpg deleted file mode 100644 index 8dd6861aff0a54b7282254fa5ef9f017f7a0b3bb..0000000000000000000000000000000000000000 --- a/stf/TEMP/00022.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cc66628806d471a835a953cdc6ab362d09a65f08af8e4245a5c942e2da2471a0 -size 130 diff --git a/stf/TEMP/00023.jpg b/stf/TEMP/00023.jpg deleted file mode 100644 index 3841012f401b69fc2895e84cc1632262a9cbd2b4..0000000000000000000000000000000000000000 --- a/stf/TEMP/00023.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:341e19c62839961e59bc2844738c8c1b6860a311a256ca86769ad465cee9d907 -size 130 diff --git a/stf/TEMP/00024.jpg b/stf/TEMP/00024.jpg deleted file mode 100644 index d4e4e1190aa35d35754a5bc2879e436c713e805a..0000000000000000000000000000000000000000 --- a/stf/TEMP/00024.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7cc2e73546938da29b13df0b3039d0d54e3c40f553521e27d06fc2c89e6709f1 -size 130 diff --git a/stf/TEMP/00025.jpg b/stf/TEMP/00025.jpg deleted file mode 100644 index 79720669bfdc79c2066b0ffc9e3c635840f1ee07..0000000000000000000000000000000000000000 --- a/stf/TEMP/00025.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1135eded39a6e6968cffbbcc733b3de3733fa8824dcca14df9b9a1e6243b9891 -size 130 diff --git a/stf/TEMP/00026.jpg b/stf/TEMP/00026.jpg deleted file mode 100644 index 47425a1c4243260add7473dc4d1163964ef1f7fe..0000000000000000000000000000000000000000 --- a/stf/TEMP/00026.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0498a2efffd5caae3a93a4a29113ebe477933a36ce5be6e1e2e64db93a7b6dea -size 130 diff --git a/stf/TEMP/00027.jpg b/stf/TEMP/00027.jpg deleted file mode 100644 index 93e43cb0b89a0af3d7120053d3e4b76b358b58c6..0000000000000000000000000000000000000000 --- a/stf/TEMP/00027.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:5969e18cb5589919dff90fd09c5c9d4e70953c06f0899096238ef5981a33fb93 -size 130 diff --git a/stf/TEMP/00028.jpg b/stf/TEMP/00028.jpg deleted file mode 100644 index 4396f7c7ee3c6b440d8b07259a443974ebac98ba..0000000000000000000000000000000000000000 --- a/stf/TEMP/00028.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f967817175d4ea2b56efe1b668bcf495c4afeb633d99c25215db8ad59b898d23 -size 130 diff --git a/stf/TEMP/00029.jpg b/stf/TEMP/00029.jpg deleted file mode 100644 index 56d7211a563b9d79001ab1fdeeca942162c884f6..0000000000000000000000000000000000000000 --- a/stf/TEMP/00029.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7c636beda531f5a44d350fc2172082f1b0b583bf7e21eb54d79606c52ea54967 -size 130 diff --git a/stf/TEMP/00030.jpg b/stf/TEMP/00030.jpg deleted file mode 100644 index 46ae72a1f7fa802786b46b9f4bf4d4f33073197e..0000000000000000000000000000000000000000 --- a/stf/TEMP/00030.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4e006021880111441d644efe3ac7f41659f59c9525cbdeefdc163be1f0af35ca -size 130 diff --git a/stf/TEMP/00031.jpg b/stf/TEMP/00031.jpg deleted file mode 100644 index 758facf74b14ee70990f9d080fb41478a62e8ba9..0000000000000000000000000000000000000000 --- a/stf/TEMP/00031.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b1a72d8aaac722b8d4d150ee99075113c1b34777ac80a11568c4059305c88d79 -size 130 diff --git a/stf/TEMP/00032.jpg b/stf/TEMP/00032.jpg deleted file mode 100644 index 74d4b3c1f68febd2a300ee20001e29fbc1b7a8c8..0000000000000000000000000000000000000000 --- a/stf/TEMP/00032.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b6c3b4230f37fe14049967c828de965e8a421e83021b592ca5a4afcdf9c8a619 -size 130 diff --git a/stf/TEMP/00033.jpg b/stf/TEMP/00033.jpg deleted file mode 100644 index 6012d7bddcfa6a68805564bb82e3cefebb8fcb53..0000000000000000000000000000000000000000 --- a/stf/TEMP/00033.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:55f0c58bba27f11beb14a3fe245ab7c857c17e8e49756002accf44cf9c4e7f6d -size 130 diff --git a/stf/TEMP/00034.jpg b/stf/TEMP/00034.jpg deleted file mode 100644 index 2521241e42036f807a60660d6de050df63ff6b03..0000000000000000000000000000000000000000 --- a/stf/TEMP/00034.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fd3f7c47c9d1bb6ffacb035b882fd6fc3e7d12f04547366ae05f900a92e9b569 -size 130 diff --git a/stf/TEMP/00035.jpg b/stf/TEMP/00035.jpg deleted file mode 100644 index be7f4da28b8fd72acb06d5710384e9f36ce2da91..0000000000000000000000000000000000000000 --- a/stf/TEMP/00035.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b269e2dee4a15b72fa7d3346924b6c483274280f22ff593fe2d40c026f80098a -size 130 diff --git a/stf/TEMP/00036.jpg b/stf/TEMP/00036.jpg deleted file mode 100644 index de04d273f352bad1d28cd555d7259bf33a77fd5f..0000000000000000000000000000000000000000 --- a/stf/TEMP/00036.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2cfa9f35c42347804ac70e9b77272251ab7ff1f55def900c3e2d5d1222b50408 -size 130 diff --git a/stf/TEMP/00037.jpg b/stf/TEMP/00037.jpg deleted file mode 100644 index 966978ba679a50e1e9a525b905fdc42a9805b7e5..0000000000000000000000000000000000000000 --- a/stf/TEMP/00037.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:377504eacd3481a61d2495d9bd5e0d48ae0bc626d5790f62f8735bfd78e7efc4 -size 130 diff --git a/stf/TEMP/00038.jpg b/stf/TEMP/00038.jpg deleted file mode 100644 index 7bd8452b8df0f7296236955909aee4a7866354d3..0000000000000000000000000000000000000000 --- a/stf/TEMP/00038.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:603727a44092ee1dffbcb8da25d3a5d0ca17d13d5cdca5ea182799caa276ba7d -size 130 diff --git a/stf/TEMP/00039.jpg b/stf/TEMP/00039.jpg deleted file mode 100644 index 629733581ed2cb9d0ece7bda756cb003134d7b57..0000000000000000000000000000000000000000 --- a/stf/TEMP/00039.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9376868934b731e519d25712974cac0bf40711342cdfc7ad1e7c5916fb18dbf2 -size 130 diff --git a/stf/TEMP/00040.jpg b/stf/TEMP/00040.jpg deleted file mode 100644 index 2c85dd1df14d63b12898e8aedeced1812f6b7d94..0000000000000000000000000000000000000000 --- a/stf/TEMP/00040.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:06c18cad176316812adbabf7d14e695ac1e75b8ec7508d72a61ed3ebb596f276 -size 130 diff --git a/stf/TEMP/00041.jpg b/stf/TEMP/00041.jpg deleted file mode 100644 index 53ed2ca36e6a58b97b0096719723b55abb323607..0000000000000000000000000000000000000000 --- a/stf/TEMP/00041.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f6793182960f1b5cefe3776e4c067faa3bb62f34cdbdfe337d6211336bbdcc38 -size 130 diff --git a/stf/TEMP/00042.jpg b/stf/TEMP/00042.jpg deleted file mode 100644 index 65783dcb1229c770b33cb6a0716652fcedfe495b..0000000000000000000000000000000000000000 --- a/stf/TEMP/00042.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c539b1a235731535fa7e1dda19c55f9b1aaf461612ef2aec73ca55bae6402813 -size 130 diff --git a/stf/TEMP/00043.jpg b/stf/TEMP/00043.jpg deleted file mode 100644 index d16a2e393399c2bf2ae53a4f9375a90f190c65f5..0000000000000000000000000000000000000000 --- a/stf/TEMP/00043.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:69d79d981c00a906ede5186b5984ebfdb7ed2d82156c2823e1bd4a75f408d42c -size 130 diff --git a/stf/TEMP/00044.jpg b/stf/TEMP/00044.jpg deleted file mode 100644 index 4b159354b8cc32989bd90a0677403724aff2efc0..0000000000000000000000000000000000000000 --- a/stf/TEMP/00044.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6fa2df669a2da4ea7813c55c80835ecbe7268f5eae0cbcb0502bcaa70d3f30d4 -size 130 diff --git a/stf/TEMP/00045.jpg b/stf/TEMP/00045.jpg deleted file mode 100644 index 5d010fafe431ee5947775fed79261b697f411ffa..0000000000000000000000000000000000000000 --- a/stf/TEMP/00045.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f8ccf93576f3811a22562a3c150248653c614e0a2bbcda4d138cb9c61968b73d -size 130 diff --git a/stf/TEMP/00046.jpg b/stf/TEMP/00046.jpg deleted file mode 100644 index d690c0f1fe7d6ef3de1b9872a662fa8ad546b530..0000000000000000000000000000000000000000 --- a/stf/TEMP/00046.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:662c5eb2c15150cf6e43226fe14c91bf6140e9b2a2cb3811bdf88cfa459531d0 -size 130 diff --git a/stf/TEMP/00047.jpg b/stf/TEMP/00047.jpg deleted file mode 100644 index 14eabb42fe1dcd5415bc19d91aa42b2671c730d2..0000000000000000000000000000000000000000 --- a/stf/TEMP/00047.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:6aa3e823829da30a8203090f70b2e5f86aa599ea858c9d356f22e7072953e09e -size 130 diff --git a/stf/TEMP/00048.jpg b/stf/TEMP/00048.jpg deleted file mode 100644 index 868d8c182572c674efbfa8a6193b4dd9d2263873..0000000000000000000000000000000000000000 --- a/stf/TEMP/00048.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8be037682ccdda11a92b04978e49de978d5911fa40ddbba6b44414a448ee2ffc -size 130 diff --git a/stf/TEMP/00049.jpg b/stf/TEMP/00049.jpg deleted file mode 100644 index fdff85569adfc61464574fad44b823eec72a76d0..0000000000000000000000000000000000000000 --- a/stf/TEMP/00049.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5df726991798fd3849a6c73d911b0e4ecfedcf6a2ca7124c7533d55c94a6cfd8 -size 130 diff --git a/stf/TEMP/00050.jpg b/stf/TEMP/00050.jpg deleted file mode 100644 index c9503fc41a3a7696faf6c5424676c111642f7ee7..0000000000000000000000000000000000000000 --- a/stf/TEMP/00050.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ce00fff4cf1b0d76c4a6879d7d37c4561cedcd87297d08fb094eeba49533f893 -size 130 diff --git a/stf/TEMP/00051.jpg b/stf/TEMP/00051.jpg deleted file mode 100644 index c36f2544a18ac1b6a6aceccb2e40e7f5ab9272d2..0000000000000000000000000000000000000000 --- a/stf/TEMP/00051.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dd2208003717335719c61de6d3201d05029eb268490adf41acecfe1eacdcffbb -size 130 diff --git a/stf/TEMP/00052.jpg b/stf/TEMP/00052.jpg deleted file mode 100644 index ae6ad95f944b00f72ab9c491aa1b6e39c947f428..0000000000000000000000000000000000000000 --- a/stf/TEMP/00052.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b350964cdf25d02b7ccd9afb2195204bdf46cb36f035b3bfebd3d54f97f9cc0f -size 130 diff --git a/stf/TEMP/00053.jpg b/stf/TEMP/00053.jpg deleted file mode 100644 index 94a1cbaa57ac85ee462d4b1384b321f2d0ef9c04..0000000000000000000000000000000000000000 --- a/stf/TEMP/00053.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ecf9a2eb609b4a3f33333550347aaf5e72067edaf91c79aee43c6012d30ab717 -size 130 diff --git a/stf/TEMP/00054.jpg b/stf/TEMP/00054.jpg deleted file mode 100644 index 94ca9b4de5a14f4c0596fcebecb54e88af7b382e..0000000000000000000000000000000000000000 --- a/stf/TEMP/00054.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5ec17a1a77199f75281af63eef2a1336ff5f52f1f7997e604cfc74ef8ca57cf1 -size 130 diff --git a/stf/TEMP/00055.jpg b/stf/TEMP/00055.jpg deleted file mode 100644 index 2c313dc6d6d551785f94402d04ad6293fcecde03..0000000000000000000000000000000000000000 --- a/stf/TEMP/00055.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:59823a6e835059c505c2a097c0425190cef9acb199ca07d69e1e5342acbee468 -size 130 diff --git a/stf/TEMP/00056.jpg b/stf/TEMP/00056.jpg deleted file mode 100644 index e8591f03b2b630710b25edee3b803732531925db..0000000000000000000000000000000000000000 --- a/stf/TEMP/00056.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f7a60a7b65b8cf7d313cc3fdf4029008001cb3c8ce3360b0b533e7ebea81edce -size 130 diff --git a/stf/TEMP/00057.jpg b/stf/TEMP/00057.jpg deleted file mode 100644 index 0867f5c73566bd70f799dc958e2885f009d95f6d..0000000000000000000000000000000000000000 --- a/stf/TEMP/00057.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:4e31e804c8c2ef9f0b9057a9cb78db02d39bc6db1d90afd03a8abdf44c24baab -size 130 diff --git a/stf/TEMP/00058.jpg b/stf/TEMP/00058.jpg deleted file mode 100644 index 71120e43a8eef5f9929156047f2a3b894b13770b..0000000000000000000000000000000000000000 --- a/stf/TEMP/00058.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:649ece37dab5e317f4e6cab2128a35b31331f67181d418743eb9e80e0156b0ff -size 130 diff --git a/stf/TEMP/00059.jpg b/stf/TEMP/00059.jpg deleted file mode 100644 index 31c30704b6d271ee4df763d06d8b0e7052264981..0000000000000000000000000000000000000000 --- a/stf/TEMP/00059.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3dd6d4d28a15546cf8cf267897d7a51481e0d2eb144a09bfd2f0ac09f6c40041 -size 130 diff --git a/stf/TEMP/00060.jpg b/stf/TEMP/00060.jpg deleted file mode 100644 index 4f86563919eb73ae672d272685b6f6c4489970a7..0000000000000000000000000000000000000000 --- a/stf/TEMP/00060.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:37f74f036eb830ff03ca99426a090dc98c5a8198be8fc8eafd4add4219a2a090 -size 130 diff --git a/stf/TEMP/00061.jpg b/stf/TEMP/00061.jpg deleted file mode 100644 index 024c76eb11b37fb1a1d549f96907f14b980bd764..0000000000000000000000000000000000000000 --- a/stf/TEMP/00061.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2735c00fc2345e708370b2e3821e4930d16e2c6f8c42018de9970a6d47688940 -size 130 diff --git a/stf/TEMP/00062.jpg b/stf/TEMP/00062.jpg deleted file mode 100644 index 2f0c4a44c4e14abea7351a34d5e7822a9be1ba64..0000000000000000000000000000000000000000 --- a/stf/TEMP/00062.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:51b8841507500d86a1a32e778f33a390390af307a0cc43ca82268cfd7cde256b -size 130 diff --git a/stf/TEMP/00063.jpg b/stf/TEMP/00063.jpg deleted file mode 100644 index c633284aca6d157fdb910e47fbe0d7c9e5b1cfc6..0000000000000000000000000000000000000000 --- a/stf/TEMP/00063.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:351f5286f595e366627b935a49c90fb967d40be7720248b32adeb50959fc0682 -size 130 diff --git a/stf/TEMP/0157.pth b/stf/TEMP/0157.pth deleted file mode 100644 index e8babbc3efe379d796bc911765e736370ce1fa52..0000000000000000000000000000000000000000 --- a/stf/TEMP/0157.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0ffc6eb4818dff551254ea8405a24407dd14a5f2fb0eea9cd1e01743d37c6b41 -size 135 diff --git a/stf/TEMP/2DFAN4-cd938726ad.zip b/stf/TEMP/2DFAN4-cd938726ad.zip deleted file mode 100644 index 0c386b8b1e15c3df9edef0130ef3e59c70305ba8..0000000000000000000000000000000000000000 --- a/stf/TEMP/2DFAN4-cd938726ad.zip +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5d2c335aa68dd42b5be255afafcd8a9a059653075feea7523639debfe0b8382e -size 133 diff --git a/stf/TEMP/3DFAN4-4a694010b9.zip b/stf/TEMP/3DFAN4-4a694010b9.zip deleted file mode 100644 index 793903841a9b711b8ae4d816287de4a01a9c6a4e..0000000000000000000000000000000000000000 --- a/stf/TEMP/3DFAN4-4a694010b9.zip +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b346cfdd6f9ba086738e0b6836f617bb24ea6d95cb86dbf16702172996fd55f4 -size 133 diff --git a/stf/TEMP/Cam2_2309071202_0012_Natural_Looped.mp4 b/stf/TEMP/Cam2_2309071202_0012_Natural_Looped.mp4 deleted file mode 100644 index 
25fa4c9f163ef49e63c49f61d87494000a1ce26f..0000000000000000000000000000000000000000
--- a/stf/TEMP/Cam2_2309071202_0012_Natural_Looped.mp4
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9ab13f299c268d289585671aee5dea03e96facc582ec49a64ba66099635b8f22
-size 134
diff --git a/stf/TEMP/depth-6c4283c0e0.zip b/stf/TEMP/depth-6c4283c0e0.zip
deleted file mode 100644
index 297bd066578d71700fd5702eb95b6511974d7381..0000000000000000000000000000000000000000
--- a/stf/TEMP/depth-6c4283c0e0.zip
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d76eab4757f77a777488afe94900f6e1bc5e53a5af906e7d5acff8b9967c8d66
-size 134
diff --git a/stf/TEMP/front_config_v3.json b/stf/TEMP/front_config_v3.json
deleted file mode 100644
index a794fcac5564ccea4f97fda53bbd7fef779474d3..0000000000000000000000000000000000000000
--- a/stf/TEMP/front_config_v3.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:34770b9b9d647813bbe2313300cc8ec8e2f5e3a53dd28d7c5ec003b1897d74b1
-size 128
diff --git a/stf/TEMP/ref.jpg b/stf/TEMP/ref.jpg
deleted file mode 100644
index f9abfe7767ee9e16d7e42a71267a65e1abca5f22..0000000000000000000000000000000000000000
--- a/stf/TEMP/ref.jpg
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7c634b30069a7ef48875a74625915cda008cfea220669e3c50d402c0f46761f5
-size 130
diff --git a/stf/TEMP/s3fd-619a316812.pth b/stf/TEMP/s3fd-619a316812.pth
deleted file mode 100644
index d7e71b3fa53988f41ed2c5497189f2316e59c016..0000000000000000000000000000000000000000
--- a/stf/TEMP/s3fd-619a316812.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7636d0c9d2a8f4759aef537cbcc25c5fa2eb2d5d80b1fada4dcc800e967cf381
-size 133
diff --git a/stf/convert.py b/stf/convert.py
deleted file mode 100644
index 033f7a65de710a426db4c8807bb6286af2c079c1..0000000000000000000000000000000000000000
--- a/stf/convert.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import torch
-import numpy as np
-
-def convert():
-    state_dict = torch.load("mnist_cnn.pt")
-
-    tensor = {
-        key: tensor.cpu().numpy() for key, tensor in state_dict.items()
-    }
-
-    for key, value in tensor.items():
-        print(key, value.shape)
-
-    np.savez("mnist.npz", **tensor)
-
-def main():
-    convert()
-
-if __name__ == "__main__":
-    main()
diff --git a/stf/front_config.json b/stf/front_config.json
deleted file mode 100644
index 374c78a843c6d8726dc876561527af20edc43f9b..0000000000000000000000000000000000000000
--- a/stf/front_config.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-    "name": "nasilhong_f_v1_front",
-    "batch_size": 16,
-    "num_workers": 16,
-    "mel_step_size": 108,
-    "mel_ps": 80,
-    "img_size": 352,
-    "mask_ver": "pwb_front_v39_1_2",
-    "num_ips": 2,
-    "mask_img_trsf_ver": 0,
-    "mel_trsf_ver": -1,
-    "mel_norm_ver": -1,
-    "lr": 1,
-    "crop_offset_y": 0.08,
-    "crop_margin": 0.35,
-    "crop_start_frame": 3,
-    "model_type": "stf_v3"
-}
-
diff --git a/stf/front_config_v3.json b/stf/front_config_v3.json
deleted file mode 100644
index eedf2a2b1a5eb4094f9271414434cee81c673c41..0000000000000000000000000000000000000000
--- a/stf/front_config_v3.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
-    "name": "Ian_v3_front",
-    "batch_size": 16,
-    "num_workers": 16,
-    "mel_step_size": 108,
-    "mel_ps": 80,
-    "img_size": 352,
-    "mask_ver": "pwb_front_v39_1_wide",
-    "num_ips": 2,
-    "mask_img_trsf_ver": 0,
-    "mel_trsf_ver": -1,
-    "mel_norm_ver": -1,
-    "lr": 1,
-    "crop_offset_y": 0.08,
-    "crop_margin": 0.35,
-    "crop_start_frame": 3,
-
"model_type":"stf_v3", - "compose":"compose_smooth", - "image_resize":"True", - "compose_box": "face_only", - "compose_args": { - "blur_ratio": 0.3, - "dilate_ratio": 0.3 - } -} \ No newline at end of file diff --git a/stf/models--Systran--faster-whisper-large-v3/.DS_Store b/stf/models--Systran--faster-whisper-large-v3/.DS_Store deleted file mode 100644 index 14c26d5a0f8b405268228ae81557591a62dd08e1..0000000000000000000000000000000000000000 Binary files a/stf/models--Systran--faster-whisper-large-v3/.DS_Store and /dev/null differ diff --git a/stf/models--Systran--faster-whisper-large-v3/snapshots/.DS_Store b/stf/models--Systran--faster-whisper-large-v3/snapshots/.DS_Store deleted file mode 100644 index 95aca6e8430acb4809a353fa51afbc167e71ff91..0000000000000000000000000000000000000000 Binary files a/stf/models--Systran--faster-whisper-large-v3/snapshots/.DS_Store and /dev/null differ diff --git a/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/config.json b/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/config.json deleted file mode 100644 index 75336feae814999bae6ccccdecf177639ffc6f9d..0000000000000000000000000000000000000000 --- a/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/config.json +++ /dev/null @@ -1,240 +0,0 @@ -{ - "alignment_heads": [ - [ - 7, - 0 - ], - [ - 10, - 17 - ], - [ - 12, - 18 - ], - [ - 13, - 12 - ], - [ - 16, - 1 - ], - [ - 17, - 14 - ], - [ - 19, - 11 - ], - [ - 21, - 4 - ], - [ - 24, - 1 - ], - [ - 25, - 6 - ] - ], - "lang_ids": [ - 50259, - 50260, - 50261, - 50262, - 50263, - 50264, - 50265, - 50266, - 50267, - 50268, - 50269, - 50270, - 50271, - 50272, - 50273, - 50274, - 50275, - 50276, - 50277, - 50278, - 50279, - 50280, - 50281, - 50282, - 50283, - 50284, - 50285, - 50286, - 50287, - 50288, - 50289, - 50290, - 50291, - 50292, - 50293, - 50294, - 50295, - 50296, - 50297, - 50298, - 50299, - 50300, - 50301, - 50302, - 50303, - 50304, - 50305, - 50306, - 50307, - 50308, - 50309, - 50310, - 50311, - 50312, - 50313, - 50314, - 50315, - 50316, - 50317, - 50318, - 50319, - 50320, - 50321, - 50322, - 50323, - 50324, - 50325, - 50326, - 50327, - 50328, - 50329, - 50330, - 50331, - 50332, - 50333, - 50334, - 50335, - 50336, - 50337, - 50338, - 50339, - 50340, - 50341, - 50342, - 50343, - 50344, - 50345, - 50346, - 50347, - 50348, - 50349, - 50350, - 50351, - 50352, - 50353, - 50354, - 50355, - 50356, - 50357, - 50358 - ], - "suppress_ids": [ - 1, - 2, - 7, - 8, - 9, - 10, - 14, - 25, - 26, - 27, - 28, - 29, - 31, - 58, - 59, - 60, - 61, - 62, - 63, - 90, - 91, - 92, - 93, - 359, - 503, - 522, - 542, - 873, - 893, - 902, - 918, - 922, - 931, - 1350, - 1853, - 1982, - 2460, - 2627, - 3246, - 3253, - 3268, - 3536, - 3846, - 3961, - 4183, - 4667, - 6585, - 6647, - 7273, - 9061, - 9383, - 10428, - 10929, - 11938, - 12033, - 12331, - 12562, - 13793, - 14157, - 14635, - 15265, - 15618, - 16553, - 16604, - 18362, - 18956, - 20075, - 21675, - 22520, - 26130, - 26161, - 26435, - 28279, - 29464, - 31650, - 32302, - 32470, - 36865, - 42863, - 47425, - 49870, - 50254, - 50258, - 50359, - 50360, - 50361, - 50362, - 50363 - ], - "suppress_ids_begin": [ - 220, - 50257 - ] -} diff --git a/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/model.bin b/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/model.bin deleted file mode 100644 index 
ae2e2d2596eaa0e5eff6a13da53ee1848a4a75cf..0000000000000000000000000000000000000000 --- a/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/model.bin +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6405bd9a4a9d57eb0fb2050ce167c62bae7adf64a340ae3c26a2276033cd8766 -size 135 diff --git a/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/preprocessor_config.json b/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/preprocessor_config.json deleted file mode 100644 index 931c77a740890c46365c7ae0c9d350ba3cca908f..0000000000000000000000000000000000000000 --- a/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/preprocessor_config.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "chunk_length": 30, - "feature_extractor_type": "WhisperFeatureExtractor", - "feature_size": 128, - "hop_length": 160, - "n_fft": 400, - "n_samples": 480000, - "nb_max_frames": 3000, - "padding_side": "right", - "padding_value": 0.0, - "processor_class": "WhisperProcessor", - "return_attention_mask": false, - "sampling_rate": 16000 -} diff --git a/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/tokenizer.json b/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/tokenizer.json deleted file mode 100644 index 3a5e2ba63acdcac9a19ba56cf9bd27f185bfff61..0000000000000000000000000000000000000000 --- a/stf/models--Systran--faster-whisper-large-v3/snapshots/edaa852ec7e145841d8ffdb056a99866b5f0a478/tokenizer.json +++ /dev/null @@ -1,114861 +0,0 @@ -{ - "version": "1.0", - "truncation": null, - "padding": null, - "added_tokens": [ - { - "id": 50257, - "content": "<|endoftext|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50258, - "content": "<|startoftranscript|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50259, - "content": "<|en|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50260, - "content": "<|zh|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50261, - "content": "<|de|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50262, - "content": "<|es|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50263, - "content": "<|ru|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50264, - "content": "<|ko|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50265, - "content": "<|fr|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50266, - "content": "<|ja|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50267, - "content": "<|pt|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50268, - "content": "<|tr|>", - "single_word": false, - 
"lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50269, - "content": "<|pl|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50270, - "content": "<|ca|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50271, - "content": "<|nl|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50272, - "content": "<|ar|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50273, - "content": "<|sv|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50274, - "content": "<|it|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50275, - "content": "<|id|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50276, - "content": "<|hi|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50277, - "content": "<|fi|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50278, - "content": "<|vi|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50279, - "content": "<|he|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50280, - "content": "<|uk|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50281, - "content": "<|el|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50282, - "content": "<|ms|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50283, - "content": "<|cs|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50284, - "content": "<|ro|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50285, - "content": "<|da|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50286, - "content": "<|hu|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50287, - "content": "<|ta|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50288, - "content": "<|no|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50289, - "content": "<|th|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50290, - "content": "<|ur|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50291, - "content": "<|hr|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50292, - 
"content": "<|bg|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50293, - "content": "<|lt|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50294, - "content": "<|la|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50295, - "content": "<|mi|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50296, - "content": "<|ml|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50297, - "content": "<|cy|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50298, - "content": "<|sk|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50299, - "content": "<|te|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50300, - "content": "<|fa|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50301, - "content": "<|lv|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50302, - "content": "<|bn|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50303, - "content": "<|sr|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50304, - "content": "<|az|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50305, - "content": "<|sl|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50306, - "content": "<|kn|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50307, - "content": "<|et|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50308, - "content": "<|mk|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50309, - "content": "<|br|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50310, - "content": "<|eu|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50311, - "content": "<|is|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50312, - "content": "<|hy|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50313, - "content": "<|ne|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50314, - "content": "<|mn|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50315, - "content": "<|bs|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": 
false, - "special": true - }, - { - "id": 50316, - "content": "<|kk|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50317, - "content": "<|sq|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50318, - "content": "<|sw|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50319, - "content": "<|gl|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50320, - "content": "<|mr|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50321, - "content": "<|pa|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50322, - "content": "<|si|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50323, - "content": "<|km|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50324, - "content": "<|sn|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50325, - "content": "<|yo|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50326, - "content": "<|so|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50327, - "content": "<|af|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50328, - "content": "<|oc|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50329, - "content": "<|ka|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50330, - "content": "<|be|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50331, - "content": "<|tg|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50332, - "content": "<|sd|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50333, - "content": "<|gu|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50334, - "content": "<|am|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50335, - "content": "<|yi|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50336, - "content": "<|lo|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50337, - "content": "<|uz|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50338, - "content": "<|fo|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50339, - "content": "<|ht|>", - "single_word": false, - 
"lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50340, - "content": "<|ps|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50341, - "content": "<|tk|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50342, - "content": "<|nn|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50343, - "content": "<|mt|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50344, - "content": "<|sa|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50345, - "content": "<|lb|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50346, - "content": "<|my|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50347, - "content": "<|bo|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50348, - "content": "<|tl|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50349, - "content": "<|mg|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50350, - "content": "<|as|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50351, - "content": "<|tt|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50352, - "content": "<|haw|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50353, - "content": "<|ln|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50354, - "content": "<|ha|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50355, - "content": "<|ba|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50356, - "content": "<|jw|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50357, - "content": "<|su|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50358, - "content": "<|yue|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50359, - "content": "<|translate|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50360, - "content": "<|transcribe|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50361, - "content": "<|startoflm|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50362, - "content": "<|startofprev|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": 
true - }, - { - "id": 50363, - "content": "<|nospeech|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50364, - "content": "<|notimestamps|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": false, - "special": true - }, - { - "id": 50365, - "content": "<|0.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50366, - "content": "<|0.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50367, - "content": "<|0.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50368, - "content": "<|0.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50369, - "content": "<|0.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50370, - "content": "<|0.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50371, - "content": "<|0.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50372, - "content": "<|0.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50373, - "content": "<|0.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50374, - "content": "<|0.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50375, - "content": "<|0.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50376, - "content": "<|0.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50377, - "content": "<|0.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50378, - "content": "<|0.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50379, - "content": "<|0.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50380, - "content": "<|0.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50381, - "content": "<|0.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50382, - "content": "<|0.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50383, - "content": "<|0.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50384, - "content": "<|0.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50385, - "content": "<|0.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50386, - "content": 
"<|0.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50387, - "content": "<|0.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50388, - "content": "<|0.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50389, - "content": "<|0.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50390, - "content": "<|0.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50391, - "content": "<|0.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50392, - "content": "<|0.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50393, - "content": "<|0.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50394, - "content": "<|0.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50395, - "content": "<|0.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50396, - "content": "<|0.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50397, - "content": "<|0.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50398, - "content": "<|0.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50399, - "content": "<|0.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50400, - "content": "<|0.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50401, - "content": "<|0.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50402, - "content": "<|0.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50403, - "content": "<|0.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50404, - "content": "<|0.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50405, - "content": "<|0.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50406, - "content": "<|0.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50407, - "content": "<|0.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50408, - "content": "<|0.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50409, - "content": "<|0.88|>", - "single_word": false, - "lstrip": false, - 
"rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50410, - "content": "<|0.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50411, - "content": "<|0.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50412, - "content": "<|0.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50413, - "content": "<|0.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50414, - "content": "<|0.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50415, - "content": "<|1.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50416, - "content": "<|1.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50417, - "content": "<|1.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50418, - "content": "<|1.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50419, - "content": "<|1.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50420, - "content": "<|1.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50421, - "content": "<|1.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50422, - "content": "<|1.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50423, - "content": "<|1.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50424, - "content": "<|1.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50425, - "content": "<|1.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50426, - "content": "<|1.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50427, - "content": "<|1.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50428, - "content": "<|1.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50429, - "content": "<|1.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50430, - "content": "<|1.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50431, - "content": "<|1.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50432, - "content": "<|1.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false 
- }, - { - "id": 50433, - "content": "<|1.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50434, - "content": "<|1.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50435, - "content": "<|1.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50436, - "content": "<|1.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50437, - "content": "<|1.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50438, - "content": "<|1.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50439, - "content": "<|1.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50440, - "content": "<|1.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50441, - "content": "<|1.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50442, - "content": "<|1.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50443, - "content": "<|1.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50444, - "content": "<|1.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50445, - "content": "<|1.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50446, - "content": "<|1.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50447, - "content": "<|1.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50448, - "content": "<|1.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50449, - "content": "<|1.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50450, - "content": "<|1.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50451, - "content": "<|1.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50452, - "content": "<|1.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50453, - "content": "<|1.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50454, - "content": "<|1.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50455, - "content": "<|1.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50456, - "content": "<|1.82|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50457, - "content": "<|1.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50458, - "content": "<|1.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50459, - "content": "<|1.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50460, - "content": "<|1.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50461, - "content": "<|1.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50462, - "content": "<|1.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50463, - "content": "<|1.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50464, - "content": "<|1.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50465, - "content": "<|2.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50466, - "content": "<|2.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50467, - "content": "<|2.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50468, - "content": "<|2.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50469, - "content": "<|2.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50470, - "content": "<|2.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50471, - "content": "<|2.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50472, - "content": "<|2.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50473, - "content": "<|2.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50474, - "content": "<|2.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50475, - "content": "<|2.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50476, - "content": "<|2.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50477, - "content": "<|2.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50478, - "content": "<|2.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50479, - "content": "<|2.28|>", - "single_word": false, - "lstrip": false, - "rstrip": 
false, - "normalized": true, - "special": false - }, - { - "id": 50480, - "content": "<|2.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50481, - "content": "<|2.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50482, - "content": "<|2.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50483, - "content": "<|2.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50484, - "content": "<|2.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50485, - "content": "<|2.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50486, - "content": "<|2.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50487, - "content": "<|2.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50488, - "content": "<|2.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50489, - "content": "<|2.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50490, - "content": "<|2.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50491, - "content": "<|2.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50492, - "content": "<|2.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50493, - "content": "<|2.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50494, - "content": "<|2.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50495, - "content": "<|2.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50496, - "content": "<|2.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50497, - "content": "<|2.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50498, - "content": "<|2.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50499, - "content": "<|2.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50500, - "content": "<|2.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50501, - "content": "<|2.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50502, - "content": "<|2.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - 
"id": 50503, - "content": "<|2.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50504, - "content": "<|2.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50505, - "content": "<|2.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50506, - "content": "<|2.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50507, - "content": "<|2.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50508, - "content": "<|2.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50509, - "content": "<|2.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50510, - "content": "<|2.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50511, - "content": "<|2.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50512, - "content": "<|2.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50513, - "content": "<|2.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50514, - "content": "<|2.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50515, - "content": "<|3.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50516, - "content": "<|3.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50517, - "content": "<|3.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50518, - "content": "<|3.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50519, - "content": "<|3.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50520, - "content": "<|3.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50521, - "content": "<|3.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50522, - "content": "<|3.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50523, - "content": "<|3.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50524, - "content": "<|3.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50525, - "content": "<|3.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50526, - "content": "<|3.22|>", - "single_word": 
false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50527, - "content": "<|3.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50528, - "content": "<|3.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50529, - "content": "<|3.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50530, - "content": "<|3.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50531, - "content": "<|3.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50532, - "content": "<|3.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50533, - "content": "<|3.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50534, - "content": "<|3.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50535, - "content": "<|3.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50536, - "content": "<|3.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50537, - "content": "<|3.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50538, - "content": "<|3.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50539, - "content": "<|3.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50540, - "content": "<|3.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50541, - "content": "<|3.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50542, - "content": "<|3.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50543, - "content": "<|3.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50544, - "content": "<|3.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50545, - "content": "<|3.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50546, - "content": "<|3.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50547, - "content": "<|3.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50548, - "content": "<|3.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50549, - "content": "<|3.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 50550, - "content": "<|3.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50551, - "content": "<|3.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50552, - "content": "<|3.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50553, - "content": "<|3.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50554, - "content": "<|3.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50555, - "content": "<|3.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50556, - "content": "<|3.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50557, - "content": "<|3.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50558, - "content": "<|3.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50559, - "content": "<|3.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50560, - "content": "<|3.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50561, - "content": "<|3.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50562, - "content": "<|3.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50563, - "content": "<|3.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50564, - "content": "<|3.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50565, - "content": "<|4.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50566, - "content": "<|4.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50567, - "content": "<|4.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50568, - "content": "<|4.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50569, - "content": "<|4.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50570, - "content": "<|4.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50571, - "content": "<|4.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50572, - "content": "<|4.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 
50573, - "content": "<|4.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50574, - "content": "<|4.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50575, - "content": "<|4.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50576, - "content": "<|4.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50577, - "content": "<|4.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50578, - "content": "<|4.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50579, - "content": "<|4.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50580, - "content": "<|4.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50581, - "content": "<|4.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50582, - "content": "<|4.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50583, - "content": "<|4.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50584, - "content": "<|4.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50585, - "content": "<|4.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50586, - "content": "<|4.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50587, - "content": "<|4.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50588, - "content": "<|4.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50589, - "content": "<|4.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50590, - "content": "<|4.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50591, - "content": "<|4.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50592, - "content": "<|4.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50593, - "content": "<|4.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50594, - "content": "<|4.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50595, - "content": "<|4.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50596, - "content": "<|4.62|>", - "single_word": false, - 
"lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50597, - "content": "<|4.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50598, - "content": "<|4.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50599, - "content": "<|4.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50600, - "content": "<|4.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50601, - "content": "<|4.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50602, - "content": "<|4.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50603, - "content": "<|4.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50604, - "content": "<|4.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50605, - "content": "<|4.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50606, - "content": "<|4.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50607, - "content": "<|4.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50608, - "content": "<|4.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50609, - "content": "<|4.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50610, - "content": "<|4.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50611, - "content": "<|4.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50612, - "content": "<|4.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50613, - "content": "<|4.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50614, - "content": "<|4.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50615, - "content": "<|5.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50616, - "content": "<|5.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50617, - "content": "<|5.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50618, - "content": "<|5.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50619, - "content": "<|5.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, 
- "special": false - }, - { - "id": 50620, - "content": "<|5.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50621, - "content": "<|5.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50622, - "content": "<|5.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50623, - "content": "<|5.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50624, - "content": "<|5.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50625, - "content": "<|5.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50626, - "content": "<|5.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50627, - "content": "<|5.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50628, - "content": "<|5.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50629, - "content": "<|5.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50630, - "content": "<|5.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50631, - "content": "<|5.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50632, - "content": "<|5.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50633, - "content": "<|5.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50634, - "content": "<|5.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50635, - "content": "<|5.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50636, - "content": "<|5.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50637, - "content": "<|5.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50638, - "content": "<|5.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50639, - "content": "<|5.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50640, - "content": "<|5.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50641, - "content": "<|5.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50642, - "content": "<|5.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50643, - "content": 
"<|5.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50644, - "content": "<|5.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50645, - "content": "<|5.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50646, - "content": "<|5.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50647, - "content": "<|5.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50648, - "content": "<|5.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50649, - "content": "<|5.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50650, - "content": "<|5.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50651, - "content": "<|5.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50652, - "content": "<|5.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50653, - "content": "<|5.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50654, - "content": "<|5.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50655, - "content": "<|5.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50656, - "content": "<|5.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50657, - "content": "<|5.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50658, - "content": "<|5.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50659, - "content": "<|5.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50660, - "content": "<|5.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50661, - "content": "<|5.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50662, - "content": "<|5.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50663, - "content": "<|5.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50664, - "content": "<|5.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50665, - "content": "<|6.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50666, - "content": "<|6.02|>", - "single_word": false, - "lstrip": false, - 
"rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50667, - "content": "<|6.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50668, - "content": "<|6.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50669, - "content": "<|6.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50670, - "content": "<|6.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50671, - "content": "<|6.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50672, - "content": "<|6.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50673, - "content": "<|6.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50674, - "content": "<|6.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50675, - "content": "<|6.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50676, - "content": "<|6.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50677, - "content": "<|6.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50678, - "content": "<|6.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50679, - "content": "<|6.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50680, - "content": "<|6.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50681, - "content": "<|6.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50682, - "content": "<|6.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50683, - "content": "<|6.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50684, - "content": "<|6.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50685, - "content": "<|6.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50686, - "content": "<|6.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50687, - "content": "<|6.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50688, - "content": "<|6.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50689, - "content": "<|6.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false 
- }, - { - "id": 50690, - "content": "<|6.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50691, - "content": "<|6.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50692, - "content": "<|6.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50693, - "content": "<|6.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50694, - "content": "<|6.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50695, - "content": "<|6.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50696, - "content": "<|6.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50697, - "content": "<|6.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50698, - "content": "<|6.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50699, - "content": "<|6.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50700, - "content": "<|6.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50701, - "content": "<|6.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50702, - "content": "<|6.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50703, - "content": "<|6.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50704, - "content": "<|6.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50705, - "content": "<|6.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50706, - "content": "<|6.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50707, - "content": "<|6.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50708, - "content": "<|6.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50709, - "content": "<|6.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50710, - "content": "<|6.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50711, - "content": "<|6.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50712, - "content": "<|6.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50713, - "content": "<|6.96|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50714, - "content": "<|6.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50715, - "content": "<|7.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50716, - "content": "<|7.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50717, - "content": "<|7.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50718, - "content": "<|7.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50719, - "content": "<|7.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50720, - "content": "<|7.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50721, - "content": "<|7.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50722, - "content": "<|7.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50723, - "content": "<|7.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50724, - "content": "<|7.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50725, - "content": "<|7.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50726, - "content": "<|7.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50727, - "content": "<|7.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50728, - "content": "<|7.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50729, - "content": "<|7.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50730, - "content": "<|7.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50731, - "content": "<|7.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50732, - "content": "<|7.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50733, - "content": "<|7.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50734, - "content": "<|7.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50735, - "content": "<|7.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50736, - "content": "<|7.42|>", - "single_word": false, - "lstrip": false, - "rstrip": 
false, - "normalized": true, - "special": false - }, - { - "id": 50737, - "content": "<|7.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50738, - "content": "<|7.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50739, - "content": "<|7.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50740, - "content": "<|7.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50741, - "content": "<|7.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50742, - "content": "<|7.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50743, - "content": "<|7.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50744, - "content": "<|7.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50745, - "content": "<|7.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50746, - "content": "<|7.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50747, - "content": "<|7.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50748, - "content": "<|7.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50749, - "content": "<|7.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50750, - "content": "<|7.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50751, - "content": "<|7.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50752, - "content": "<|7.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50753, - "content": "<|7.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50754, - "content": "<|7.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50755, - "content": "<|7.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50756, - "content": "<|7.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50757, - "content": "<|7.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50758, - "content": "<|7.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50759, - "content": "<|7.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - 
"id": 50760, - "content": "<|7.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50761, - "content": "<|7.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50762, - "content": "<|7.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50763, - "content": "<|7.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50764, - "content": "<|7.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50765, - "content": "<|8.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50766, - "content": "<|8.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50767, - "content": "<|8.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50768, - "content": "<|8.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50769, - "content": "<|8.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50770, - "content": "<|8.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50771, - "content": "<|8.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50772, - "content": "<|8.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50773, - "content": "<|8.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50774, - "content": "<|8.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50775, - "content": "<|8.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50776, - "content": "<|8.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50777, - "content": "<|8.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50778, - "content": "<|8.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50779, - "content": "<|8.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50780, - "content": "<|8.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50781, - "content": "<|8.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50782, - "content": "<|8.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50783, - "content": "<|8.36|>", - "single_word": 
false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50784, - "content": "<|8.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50785, - "content": "<|8.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50786, - "content": "<|8.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50787, - "content": "<|8.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50788, - "content": "<|8.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50789, - "content": "<|8.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50790, - "content": "<|8.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50791, - "content": "<|8.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50792, - "content": "<|8.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50793, - "content": "<|8.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50794, - "content": "<|8.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50795, - "content": "<|8.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50796, - "content": "<|8.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50797, - "content": "<|8.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50798, - "content": "<|8.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50799, - "content": "<|8.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50800, - "content": "<|8.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50801, - "content": "<|8.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50802, - "content": "<|8.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50803, - "content": "<|8.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50804, - "content": "<|8.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50805, - "content": "<|8.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50806, - "content": "<|8.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 50807, - "content": "<|8.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50808, - "content": "<|8.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50809, - "content": "<|8.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50810, - "content": "<|8.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50811, - "content": "<|8.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50812, - "content": "<|8.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50813, - "content": "<|8.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50814, - "content": "<|8.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50815, - "content": "<|9.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50816, - "content": "<|9.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50817, - "content": "<|9.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50818, - "content": "<|9.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50819, - "content": "<|9.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50820, - "content": "<|9.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50821, - "content": "<|9.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50822, - "content": "<|9.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50823, - "content": "<|9.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50824, - "content": "<|9.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50825, - "content": "<|9.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50826, - "content": "<|9.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50827, - "content": "<|9.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50828, - "content": "<|9.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50829, - "content": "<|9.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 
50830, - "content": "<|9.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50831, - "content": "<|9.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50832, - "content": "<|9.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50833, - "content": "<|9.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50834, - "content": "<|9.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50835, - "content": "<|9.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50836, - "content": "<|9.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50837, - "content": "<|9.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50838, - "content": "<|9.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50839, - "content": "<|9.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50840, - "content": "<|9.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50841, - "content": "<|9.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50842, - "content": "<|9.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50843, - "content": "<|9.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50844, - "content": "<|9.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50845, - "content": "<|9.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50846, - "content": "<|9.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50847, - "content": "<|9.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50848, - "content": "<|9.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50849, - "content": "<|9.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50850, - "content": "<|9.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50851, - "content": "<|9.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50852, - "content": "<|9.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50853, - "content": "<|9.76|>", - "single_word": false, - 
"lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50854, - "content": "<|9.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50855, - "content": "<|9.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50856, - "content": "<|9.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50857, - "content": "<|9.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50858, - "content": "<|9.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50859, - "content": "<|9.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50860, - "content": "<|9.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50861, - "content": "<|9.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50862, - "content": "<|9.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50863, - "content": "<|9.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50864, - "content": "<|9.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50865, - "content": "<|10.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50866, - "content": "<|10.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50867, - "content": "<|10.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50868, - "content": "<|10.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50869, - "content": "<|10.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50870, - "content": "<|10.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50871, - "content": "<|10.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50872, - "content": "<|10.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50873, - "content": "<|10.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50874, - "content": "<|10.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50875, - "content": "<|10.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50876, - "content": "<|10.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 50877, - "content": "<|10.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50878, - "content": "<|10.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50879, - "content": "<|10.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50880, - "content": "<|10.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50881, - "content": "<|10.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50882, - "content": "<|10.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50883, - "content": "<|10.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50884, - "content": "<|10.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50885, - "content": "<|10.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50886, - "content": "<|10.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50887, - "content": "<|10.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50888, - "content": "<|10.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50889, - "content": "<|10.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50890, - "content": "<|10.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50891, - "content": "<|10.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50892, - "content": "<|10.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50893, - "content": "<|10.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50894, - "content": "<|10.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50895, - "content": "<|10.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50896, - "content": "<|10.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50897, - "content": "<|10.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50898, - "content": "<|10.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50899, - "content": "<|10.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 50900, - "content": "<|10.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50901, - "content": "<|10.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50902, - "content": "<|10.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50903, - "content": "<|10.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50904, - "content": "<|10.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50905, - "content": "<|10.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50906, - "content": "<|10.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50907, - "content": "<|10.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50908, - "content": "<|10.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50909, - "content": "<|10.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50910, - "content": "<|10.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50911, - "content": "<|10.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50912, - "content": "<|10.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50913, - "content": "<|10.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50914, - "content": "<|10.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50915, - "content": "<|11.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50916, - "content": "<|11.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50917, - "content": "<|11.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50918, - "content": "<|11.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50919, - "content": "<|11.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50920, - "content": "<|11.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50921, - "content": "<|11.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50922, - "content": "<|11.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50923, - 
"content": "<|11.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50924, - "content": "<|11.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50925, - "content": "<|11.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50926, - "content": "<|11.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50927, - "content": "<|11.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50928, - "content": "<|11.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50929, - "content": "<|11.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50930, - "content": "<|11.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50931, - "content": "<|11.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50932, - "content": "<|11.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50933, - "content": "<|11.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50934, - "content": "<|11.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50935, - "content": "<|11.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50936, - "content": "<|11.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50937, - "content": "<|11.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50938, - "content": "<|11.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50939, - "content": "<|11.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50940, - "content": "<|11.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50941, - "content": "<|11.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50942, - "content": "<|11.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50943, - "content": "<|11.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50944, - "content": "<|11.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50945, - "content": "<|11.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50946, - "content": "<|11.62|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50947, - "content": "<|11.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50948, - "content": "<|11.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50949, - "content": "<|11.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50950, - "content": "<|11.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50951, - "content": "<|11.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50952, - "content": "<|11.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50953, - "content": "<|11.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50954, - "content": "<|11.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50955, - "content": "<|11.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50956, - "content": "<|11.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50957, - "content": "<|11.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50958, - "content": "<|11.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50959, - "content": "<|11.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50960, - "content": "<|11.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50961, - "content": "<|11.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50962, - "content": "<|11.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50963, - "content": "<|11.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50964, - "content": "<|11.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50965, - "content": "<|12.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50966, - "content": "<|12.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50967, - "content": "<|12.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50968, - "content": "<|12.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50969, - "content": "<|12.08|>", - "single_word": false, - "lstrip": 
false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50970, - "content": "<|12.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50971, - "content": "<|12.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50972, - "content": "<|12.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50973, - "content": "<|12.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50974, - "content": "<|12.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50975, - "content": "<|12.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50976, - "content": "<|12.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50977, - "content": "<|12.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50978, - "content": "<|12.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50979, - "content": "<|12.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50980, - "content": "<|12.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50981, - "content": "<|12.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50982, - "content": "<|12.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50983, - "content": "<|12.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50984, - "content": "<|12.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50985, - "content": "<|12.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50986, - "content": "<|12.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50987, - "content": "<|12.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50988, - "content": "<|12.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50989, - "content": "<|12.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50990, - "content": "<|12.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50991, - "content": "<|12.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50992, - "content": "<|12.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 50993, - "content": "<|12.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50994, - "content": "<|12.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50995, - "content": "<|12.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50996, - "content": "<|12.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50997, - "content": "<|12.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50998, - "content": "<|12.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 50999, - "content": "<|12.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51000, - "content": "<|12.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51001, - "content": "<|12.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51002, - "content": "<|12.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51003, - "content": "<|12.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51004, - "content": "<|12.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51005, - "content": "<|12.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51006, - "content": "<|12.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51007, - "content": "<|12.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51008, - "content": "<|12.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51009, - "content": "<|12.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51010, - "content": "<|12.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51011, - "content": "<|12.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51012, - "content": "<|12.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51013, - "content": "<|12.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51014, - "content": "<|12.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51015, - "content": "<|13.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 51016, - "content": "<|13.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51017, - "content": "<|13.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51018, - "content": "<|13.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51019, - "content": "<|13.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51020, - "content": "<|13.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51021, - "content": "<|13.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51022, - "content": "<|13.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51023, - "content": "<|13.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51024, - "content": "<|13.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51025, - "content": "<|13.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51026, - "content": "<|13.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51027, - "content": "<|13.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51028, - "content": "<|13.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51029, - "content": "<|13.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51030, - "content": "<|13.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51031, - "content": "<|13.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51032, - "content": "<|13.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51033, - "content": "<|13.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51034, - "content": "<|13.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51035, - "content": "<|13.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51036, - "content": "<|13.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51037, - "content": "<|13.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51038, - "content": "<|13.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51039, - 
"content": "<|13.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51040, - "content": "<|13.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51041, - "content": "<|13.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51042, - "content": "<|13.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51043, - "content": "<|13.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51044, - "content": "<|13.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51045, - "content": "<|13.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51046, - "content": "<|13.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51047, - "content": "<|13.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51048, - "content": "<|13.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51049, - "content": "<|13.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51050, - "content": "<|13.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51051, - "content": "<|13.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51052, - "content": "<|13.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51053, - "content": "<|13.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51054, - "content": "<|13.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51055, - "content": "<|13.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51056, - "content": "<|13.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51057, - "content": "<|13.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51058, - "content": "<|13.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51059, - "content": "<|13.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51060, - "content": "<|13.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51061, - "content": "<|13.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51062, - "content": "<|13.94|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51063, - "content": "<|13.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51064, - "content": "<|13.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51065, - "content": "<|14.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51066, - "content": "<|14.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51067, - "content": "<|14.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51068, - "content": "<|14.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51069, - "content": "<|14.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51070, - "content": "<|14.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51071, - "content": "<|14.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51072, - "content": "<|14.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51073, - "content": "<|14.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51074, - "content": "<|14.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51075, - "content": "<|14.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51076, - "content": "<|14.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51077, - "content": "<|14.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51078, - "content": "<|14.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51079, - "content": "<|14.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51080, - "content": "<|14.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51081, - "content": "<|14.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51082, - "content": "<|14.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51083, - "content": "<|14.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51084, - "content": "<|14.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51085, - "content": "<|14.40|>", - "single_word": false, - "lstrip": 
false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51086, - "content": "<|14.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51087, - "content": "<|14.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51088, - "content": "<|14.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51089, - "content": "<|14.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51090, - "content": "<|14.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51091, - "content": "<|14.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51092, - "content": "<|14.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51093, - "content": "<|14.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51094, - "content": "<|14.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51095, - "content": "<|14.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51096, - "content": "<|14.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51097, - "content": "<|14.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51098, - "content": "<|14.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51099, - "content": "<|14.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51100, - "content": "<|14.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51101, - "content": "<|14.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51102, - "content": "<|14.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51103, - "content": "<|14.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51104, - "content": "<|14.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51105, - "content": "<|14.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51106, - "content": "<|14.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51107, - "content": "<|14.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51108, - "content": "<|14.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 51109, - "content": "<|14.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51110, - "content": "<|14.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51111, - "content": "<|14.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51112, - "content": "<|14.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51113, - "content": "<|14.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51114, - "content": "<|14.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51115, - "content": "<|15.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51116, - "content": "<|15.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51117, - "content": "<|15.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51118, - "content": "<|15.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51119, - "content": "<|15.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51120, - "content": "<|15.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51121, - "content": "<|15.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51122, - "content": "<|15.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51123, - "content": "<|15.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51124, - "content": "<|15.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51125, - "content": "<|15.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51126, - "content": "<|15.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51127, - "content": "<|15.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51128, - "content": "<|15.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51129, - "content": "<|15.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51130, - "content": "<|15.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51131, - "content": "<|15.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 51132, - "content": "<|15.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51133, - "content": "<|15.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51134, - "content": "<|15.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51135, - "content": "<|15.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51136, - "content": "<|15.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51137, - "content": "<|15.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51138, - "content": "<|15.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51139, - "content": "<|15.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51140, - "content": "<|15.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51141, - "content": "<|15.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51142, - "content": "<|15.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51143, - "content": "<|15.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51144, - "content": "<|15.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51145, - "content": "<|15.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51146, - "content": "<|15.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51147, - "content": "<|15.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51148, - "content": "<|15.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51149, - "content": "<|15.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51150, - "content": "<|15.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51151, - "content": "<|15.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51152, - "content": "<|15.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51153, - "content": "<|15.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51154, - "content": "<|15.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51155, - 
"content": "<|15.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51156, - "content": "<|15.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51157, - "content": "<|15.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51158, - "content": "<|15.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51159, - "content": "<|15.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51160, - "content": "<|15.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51161, - "content": "<|15.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51162, - "content": "<|15.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51163, - "content": "<|15.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51164, - "content": "<|15.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51165, - "content": "<|16.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51166, - "content": "<|16.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51167, - "content": "<|16.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51168, - "content": "<|16.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51169, - "content": "<|16.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51170, - "content": "<|16.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51171, - "content": "<|16.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51172, - "content": "<|16.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51173, - "content": "<|16.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51174, - "content": "<|16.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51175, - "content": "<|16.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51176, - "content": "<|16.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51177, - "content": "<|16.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51178, - "content": "<|16.26|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51179, - "content": "<|16.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51180, - "content": "<|16.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51181, - "content": "<|16.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51182, - "content": "<|16.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51183, - "content": "<|16.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51184, - "content": "<|16.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51185, - "content": "<|16.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51186, - "content": "<|16.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51187, - "content": "<|16.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51188, - "content": "<|16.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51189, - "content": "<|16.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51190, - "content": "<|16.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51191, - "content": "<|16.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51192, - "content": "<|16.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51193, - "content": "<|16.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51194, - "content": "<|16.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51195, - "content": "<|16.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51196, - "content": "<|16.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51197, - "content": "<|16.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51198, - "content": "<|16.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51199, - "content": "<|16.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51200, - "content": "<|16.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51201, - "content": "<|16.72|>", - "single_word": false, - "lstrip": 
false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51202, - "content": "<|16.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51203, - "content": "<|16.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51204, - "content": "<|16.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51205, - "content": "<|16.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51206, - "content": "<|16.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51207, - "content": "<|16.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51208, - "content": "<|16.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51209, - "content": "<|16.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51210, - "content": "<|16.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51211, - "content": "<|16.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51212, - "content": "<|16.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51213, - "content": "<|16.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51214, - "content": "<|16.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51215, - "content": "<|17.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51216, - "content": "<|17.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51217, - "content": "<|17.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51218, - "content": "<|17.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51219, - "content": "<|17.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51220, - "content": "<|17.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51221, - "content": "<|17.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51222, - "content": "<|17.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51223, - "content": "<|17.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51224, - "content": "<|17.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 51225, - "content": "<|17.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51226, - "content": "<|17.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51227, - "content": "<|17.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51228, - "content": "<|17.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51229, - "content": "<|17.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51230, - "content": "<|17.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51231, - "content": "<|17.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51232, - "content": "<|17.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51233, - "content": "<|17.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51234, - "content": "<|17.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51235, - "content": "<|17.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51236, - "content": "<|17.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51237, - "content": "<|17.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51238, - "content": "<|17.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51239, - "content": "<|17.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51240, - "content": "<|17.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51241, - "content": "<|17.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51242, - "content": "<|17.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51243, - "content": "<|17.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51244, - "content": "<|17.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51245, - "content": "<|17.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51246, - "content": "<|17.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51247, - "content": "<|17.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 51248, - "content": "<|17.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51249, - "content": "<|17.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51250, - "content": "<|17.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51251, - "content": "<|17.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51252, - "content": "<|17.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51253, - "content": "<|17.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51254, - "content": "<|17.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51255, - "content": "<|17.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51256, - "content": "<|17.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51257, - "content": "<|17.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51258, - "content": "<|17.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51259, - "content": "<|17.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51260, - "content": "<|17.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51261, - "content": "<|17.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51262, - "content": "<|17.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51263, - "content": "<|17.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51264, - "content": "<|17.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51265, - "content": "<|18.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51266, - "content": "<|18.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51267, - "content": "<|18.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51268, - "content": "<|18.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51269, - "content": "<|18.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51270, - "content": "<|18.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51271, - 
"content": "<|18.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51272, - "content": "<|18.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51273, - "content": "<|18.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51274, - "content": "<|18.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51275, - "content": "<|18.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51276, - "content": "<|18.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51277, - "content": "<|18.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51278, - "content": "<|18.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51279, - "content": "<|18.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51280, - "content": "<|18.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51281, - "content": "<|18.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51282, - "content": "<|18.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51283, - "content": "<|18.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51284, - "content": "<|18.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51285, - "content": "<|18.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51286, - "content": "<|18.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51287, - "content": "<|18.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51288, - "content": "<|18.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51289, - "content": "<|18.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51290, - "content": "<|18.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51291, - "content": "<|18.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51292, - "content": "<|18.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51293, - "content": "<|18.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51294, - "content": "<|18.58|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51295, - "content": "<|18.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51296, - "content": "<|18.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51297, - "content": "<|18.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51298, - "content": "<|18.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51299, - "content": "<|18.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51300, - "content": "<|18.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51301, - "content": "<|18.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51302, - "content": "<|18.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51303, - "content": "<|18.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51304, - "content": "<|18.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51305, - "content": "<|18.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51306, - "content": "<|18.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51307, - "content": "<|18.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51308, - "content": "<|18.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51309, - "content": "<|18.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51310, - "content": "<|18.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51311, - "content": "<|18.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51312, - "content": "<|18.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51313, - "content": "<|18.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51314, - "content": "<|18.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51315, - "content": "<|19.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51316, - "content": "<|19.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51317, - "content": "<|19.04|>", - "single_word": false, - "lstrip": 
false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51318, - "content": "<|19.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51319, - "content": "<|19.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51320, - "content": "<|19.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51321, - "content": "<|19.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51322, - "content": "<|19.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51323, - "content": "<|19.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51324, - "content": "<|19.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51325, - "content": "<|19.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51326, - "content": "<|19.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51327, - "content": "<|19.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51328, - "content": "<|19.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51329, - "content": "<|19.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51330, - "content": "<|19.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51331, - "content": "<|19.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51332, - "content": "<|19.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51333, - "content": "<|19.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51334, - "content": "<|19.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51335, - "content": "<|19.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51336, - "content": "<|19.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51337, - "content": "<|19.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51338, - "content": "<|19.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51339, - "content": "<|19.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51340, - "content": "<|19.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 51341, - "content": "<|19.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51342, - "content": "<|19.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51343, - "content": "<|19.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51344, - "content": "<|19.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51345, - "content": "<|19.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51346, - "content": "<|19.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51347, - "content": "<|19.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51348, - "content": "<|19.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51349, - "content": "<|19.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51350, - "content": "<|19.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51351, - "content": "<|19.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51352, - "content": "<|19.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51353, - "content": "<|19.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51354, - "content": "<|19.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51355, - "content": "<|19.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51356, - "content": "<|19.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51357, - "content": "<|19.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51358, - "content": "<|19.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51359, - "content": "<|19.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51360, - "content": "<|19.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51361, - "content": "<|19.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51362, - "content": "<|19.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51363, - "content": "<|19.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 51364, - "content": "<|19.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51365, - "content": "<|20.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51366, - "content": "<|20.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51367, - "content": "<|20.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51368, - "content": "<|20.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51369, - "content": "<|20.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51370, - "content": "<|20.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51371, - "content": "<|20.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51372, - "content": "<|20.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51373, - "content": "<|20.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51374, - "content": "<|20.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51375, - "content": "<|20.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51376, - "content": "<|20.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51377, - "content": "<|20.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51378, - "content": "<|20.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51379, - "content": "<|20.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51380, - "content": "<|20.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51381, - "content": "<|20.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51382, - "content": "<|20.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51383, - "content": "<|20.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51384, - "content": "<|20.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51385, - "content": "<|20.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51386, - "content": "<|20.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51387, - 
"content": "<|20.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51388, - "content": "<|20.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51389, - "content": "<|20.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51390, - "content": "<|20.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51391, - "content": "<|20.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51392, - "content": "<|20.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51393, - "content": "<|20.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51394, - "content": "<|20.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51395, - "content": "<|20.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51396, - "content": "<|20.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51397, - "content": "<|20.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51398, - "content": "<|20.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51399, - "content": "<|20.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51400, - "content": "<|20.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51401, - "content": "<|20.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51402, - "content": "<|20.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51403, - "content": "<|20.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51404, - "content": "<|20.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51405, - "content": "<|20.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51406, - "content": "<|20.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51407, - "content": "<|20.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51408, - "content": "<|20.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51409, - "content": "<|20.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51410, - "content": "<|20.90|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51411, - "content": "<|20.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51412, - "content": "<|20.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51413, - "content": "<|20.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51414, - "content": "<|20.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51415, - "content": "<|21.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51416, - "content": "<|21.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51417, - "content": "<|21.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51418, - "content": "<|21.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51419, - "content": "<|21.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51420, - "content": "<|21.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51421, - "content": "<|21.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51422, - "content": "<|21.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51423, - "content": "<|21.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51424, - "content": "<|21.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51425, - "content": "<|21.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51426, - "content": "<|21.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51427, - "content": "<|21.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51428, - "content": "<|21.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51429, - "content": "<|21.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51430, - "content": "<|21.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51431, - "content": "<|21.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51432, - "content": "<|21.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51433, - "content": "<|21.36|>", - "single_word": false, - "lstrip": 
false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51434, - "content": "<|21.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51435, - "content": "<|21.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51436, - "content": "<|21.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51437, - "content": "<|21.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51438, - "content": "<|21.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51439, - "content": "<|21.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51440, - "content": "<|21.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51441, - "content": "<|21.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51442, - "content": "<|21.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51443, - "content": "<|21.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51444, - "content": "<|21.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51445, - "content": "<|21.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51446, - "content": "<|21.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51447, - "content": "<|21.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51448, - "content": "<|21.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51449, - "content": "<|21.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51450, - "content": "<|21.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51451, - "content": "<|21.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51452, - "content": "<|21.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51453, - "content": "<|21.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51454, - "content": "<|21.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51455, - "content": "<|21.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51456, - "content": "<|21.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 51457, - "content": "<|21.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51458, - "content": "<|21.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51459, - "content": "<|21.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51460, - "content": "<|21.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51461, - "content": "<|21.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51462, - "content": "<|21.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51463, - "content": "<|21.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51464, - "content": "<|21.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51465, - "content": "<|22.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51466, - "content": "<|22.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51467, - "content": "<|22.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51468, - "content": "<|22.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51469, - "content": "<|22.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51470, - "content": "<|22.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51471, - "content": "<|22.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51472, - "content": "<|22.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51473, - "content": "<|22.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51474, - "content": "<|22.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51475, - "content": "<|22.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51476, - "content": "<|22.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51477, - "content": "<|22.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51478, - "content": "<|22.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51479, - "content": "<|22.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 51480, - "content": "<|22.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51481, - "content": "<|22.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51482, - "content": "<|22.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51483, - "content": "<|22.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51484, - "content": "<|22.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51485, - "content": "<|22.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51486, - "content": "<|22.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51487, - "content": "<|22.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51488, - "content": "<|22.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51489, - "content": "<|22.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51490, - "content": "<|22.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51491, - "content": "<|22.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51492, - "content": "<|22.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51493, - "content": "<|22.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51494, - "content": "<|22.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51495, - "content": "<|22.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51496, - "content": "<|22.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51497, - "content": "<|22.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51498, - "content": "<|22.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51499, - "content": "<|22.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51500, - "content": "<|22.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51501, - "content": "<|22.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51502, - "content": "<|22.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51503, - 
"content": "<|22.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51504, - "content": "<|22.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51505, - "content": "<|22.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51506, - "content": "<|22.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51507, - "content": "<|22.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51508, - "content": "<|22.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51509, - "content": "<|22.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51510, - "content": "<|22.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51511, - "content": "<|22.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51512, - "content": "<|22.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51513, - "content": "<|22.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51514, - "content": "<|22.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51515, - "content": "<|23.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51516, - "content": "<|23.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51517, - "content": "<|23.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51518, - "content": "<|23.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51519, - "content": "<|23.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51520, - "content": "<|23.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51521, - "content": "<|23.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51522, - "content": "<|23.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51523, - "content": "<|23.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51524, - "content": "<|23.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51525, - "content": "<|23.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51526, - "content": "<|23.22|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51527, - "content": "<|23.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51528, - "content": "<|23.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51529, - "content": "<|23.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51530, - "content": "<|23.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51531, - "content": "<|23.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51532, - "content": "<|23.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51533, - "content": "<|23.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51534, - "content": "<|23.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51535, - "content": "<|23.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51536, - "content": "<|23.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51537, - "content": "<|23.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51538, - "content": "<|23.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51539, - "content": "<|23.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51540, - "content": "<|23.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51541, - "content": "<|23.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51542, - "content": "<|23.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51543, - "content": "<|23.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51544, - "content": "<|23.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51545, - "content": "<|23.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51546, - "content": "<|23.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51547, - "content": "<|23.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51548, - "content": "<|23.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51549, - "content": "<|23.68|>", - "single_word": false, - "lstrip": 
false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51550, - "content": "<|23.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51551, - "content": "<|23.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51552, - "content": "<|23.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51553, - "content": "<|23.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51554, - "content": "<|23.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51555, - "content": "<|23.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51556, - "content": "<|23.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51557, - "content": "<|23.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51558, - "content": "<|23.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51559, - "content": "<|23.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51560, - "content": "<|23.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51561, - "content": "<|23.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51562, - "content": "<|23.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51563, - "content": "<|23.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51564, - "content": "<|23.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51565, - "content": "<|24.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51566, - "content": "<|24.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51567, - "content": "<|24.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51568, - "content": "<|24.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51569, - "content": "<|24.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51570, - "content": "<|24.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51571, - "content": "<|24.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51572, - "content": "<|24.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 51573, - "content": "<|24.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51574, - "content": "<|24.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51575, - "content": "<|24.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51576, - "content": "<|24.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51577, - "content": "<|24.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51578, - "content": "<|24.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51579, - "content": "<|24.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51580, - "content": "<|24.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51581, - "content": "<|24.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51582, - "content": "<|24.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51583, - "content": "<|24.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51584, - "content": "<|24.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51585, - "content": "<|24.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51586, - "content": "<|24.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51587, - "content": "<|24.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51588, - "content": "<|24.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51589, - "content": "<|24.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51590, - "content": "<|24.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51591, - "content": "<|24.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51592, - "content": "<|24.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51593, - "content": "<|24.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51594, - "content": "<|24.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51595, - "content": "<|24.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 51596, - "content": "<|24.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51597, - "content": "<|24.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51598, - "content": "<|24.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51599, - "content": "<|24.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51600, - "content": "<|24.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51601, - "content": "<|24.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51602, - "content": "<|24.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51603, - "content": "<|24.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51604, - "content": "<|24.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51605, - "content": "<|24.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51606, - "content": "<|24.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51607, - "content": "<|24.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51608, - "content": "<|24.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51609, - "content": "<|24.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51610, - "content": "<|24.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51611, - "content": "<|24.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51612, - "content": "<|24.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51613, - "content": "<|24.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51614, - "content": "<|24.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51615, - "content": "<|25.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51616, - "content": "<|25.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51617, - "content": "<|25.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51618, - "content": "<|25.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51619, - 
"content": "<|25.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51620, - "content": "<|25.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51621, - "content": "<|25.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51622, - "content": "<|25.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51623, - "content": "<|25.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51624, - "content": "<|25.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51625, - "content": "<|25.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51626, - "content": "<|25.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51627, - "content": "<|25.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51628, - "content": "<|25.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51629, - "content": "<|25.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51630, - "content": "<|25.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51631, - "content": "<|25.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51632, - "content": "<|25.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51633, - "content": "<|25.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51634, - "content": "<|25.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51635, - "content": "<|25.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51636, - "content": "<|25.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51637, - "content": "<|25.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51638, - "content": "<|25.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51639, - "content": "<|25.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51640, - "content": "<|25.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51641, - "content": "<|25.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51642, - "content": "<|25.54|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51643, - "content": "<|25.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51644, - "content": "<|25.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51645, - "content": "<|25.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51646, - "content": "<|25.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51647, - "content": "<|25.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51648, - "content": "<|25.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51649, - "content": "<|25.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51650, - "content": "<|25.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51651, - "content": "<|25.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51652, - "content": "<|25.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51653, - "content": "<|25.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51654, - "content": "<|25.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51655, - "content": "<|25.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51656, - "content": "<|25.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51657, - "content": "<|25.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51658, - "content": "<|25.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51659, - "content": "<|25.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51660, - "content": "<|25.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51661, - "content": "<|25.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51662, - "content": "<|25.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51663, - "content": "<|25.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51664, - "content": "<|25.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51665, - "content": "<|26.00|>", - "single_word": false, - "lstrip": 
false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51666, - "content": "<|26.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51667, - "content": "<|26.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51668, - "content": "<|26.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51669, - "content": "<|26.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51670, - "content": "<|26.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51671, - "content": "<|26.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51672, - "content": "<|26.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51673, - "content": "<|26.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51674, - "content": "<|26.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51675, - "content": "<|26.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51676, - "content": "<|26.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51677, - "content": "<|26.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51678, - "content": "<|26.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51679, - "content": "<|26.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51680, - "content": "<|26.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51681, - "content": "<|26.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51682, - "content": "<|26.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51683, - "content": "<|26.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51684, - "content": "<|26.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51685, - "content": "<|26.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51686, - "content": "<|26.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51687, - "content": "<|26.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51688, - "content": "<|26.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 51689, - "content": "<|26.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51690, - "content": "<|26.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51691, - "content": "<|26.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51692, - "content": "<|26.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51693, - "content": "<|26.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51694, - "content": "<|26.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51695, - "content": "<|26.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51696, - "content": "<|26.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51697, - "content": "<|26.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51698, - "content": "<|26.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51699, - "content": "<|26.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51700, - "content": "<|26.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51701, - "content": "<|26.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51702, - "content": "<|26.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51703, - "content": "<|26.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51704, - "content": "<|26.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51705, - "content": "<|26.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51706, - "content": "<|26.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51707, - "content": "<|26.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51708, - "content": "<|26.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51709, - "content": "<|26.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51710, - "content": "<|26.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51711, - "content": "<|26.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 51712, - "content": "<|26.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51713, - "content": "<|26.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51714, - "content": "<|26.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51715, - "content": "<|27.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51716, - "content": "<|27.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51717, - "content": "<|27.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51718, - "content": "<|27.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51719, - "content": "<|27.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51720, - "content": "<|27.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51721, - "content": "<|27.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51722, - "content": "<|27.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51723, - "content": "<|27.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51724, - "content": "<|27.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51725, - "content": "<|27.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51726, - "content": "<|27.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51727, - "content": "<|27.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51728, - "content": "<|27.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51729, - "content": "<|27.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51730, - "content": "<|27.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51731, - "content": "<|27.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51732, - "content": "<|27.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51733, - "content": "<|27.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51734, - "content": "<|27.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51735, - 
"content": "<|27.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51736, - "content": "<|27.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51737, - "content": "<|27.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51738, - "content": "<|27.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51739, - "content": "<|27.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51740, - "content": "<|27.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51741, - "content": "<|27.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51742, - "content": "<|27.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51743, - "content": "<|27.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51744, - "content": "<|27.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51745, - "content": "<|27.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51746, - "content": "<|27.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51747, - "content": "<|27.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51748, - "content": "<|27.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51749, - "content": "<|27.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51750, - "content": "<|27.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51751, - "content": "<|27.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51752, - "content": "<|27.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51753, - "content": "<|27.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51754, - "content": "<|27.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51755, - "content": "<|27.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51756, - "content": "<|27.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51757, - "content": "<|27.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51758, - "content": "<|27.86|>", - 
"single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51759, - "content": "<|27.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51760, - "content": "<|27.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51761, - "content": "<|27.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51762, - "content": "<|27.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51763, - "content": "<|27.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51764, - "content": "<|27.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51765, - "content": "<|28.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51766, - "content": "<|28.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51767, - "content": "<|28.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51768, - "content": "<|28.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51769, - "content": "<|28.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51770, - "content": "<|28.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51771, - "content": "<|28.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51772, - "content": "<|28.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51773, - "content": "<|28.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51774, - "content": "<|28.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51775, - "content": "<|28.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51776, - "content": "<|28.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51777, - "content": "<|28.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51778, - "content": "<|28.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51779, - "content": "<|28.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51780, - "content": "<|28.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51781, - "content": "<|28.32|>", - "single_word": false, - "lstrip": 
false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51782, - "content": "<|28.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51783, - "content": "<|28.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51784, - "content": "<|28.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51785, - "content": "<|28.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51786, - "content": "<|28.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51787, - "content": "<|28.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51788, - "content": "<|28.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51789, - "content": "<|28.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51790, - "content": "<|28.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51791, - "content": "<|28.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51792, - "content": "<|28.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51793, - "content": "<|28.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51794, - "content": "<|28.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51795, - "content": "<|28.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51796, - "content": "<|28.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51797, - "content": "<|28.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51798, - "content": "<|28.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51799, - "content": "<|28.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51800, - "content": "<|28.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51801, - "content": "<|28.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51802, - "content": "<|28.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51803, - "content": "<|28.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51804, - "content": "<|28.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - 
"normalized": true, - "special": false - }, - { - "id": 51805, - "content": "<|28.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51806, - "content": "<|28.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51807, - "content": "<|28.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51808, - "content": "<|28.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51809, - "content": "<|28.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51810, - "content": "<|28.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51811, - "content": "<|28.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51812, - "content": "<|28.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51813, - "content": "<|28.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51814, - "content": "<|28.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51815, - "content": "<|29.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51816, - "content": "<|29.02|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51817, - "content": "<|29.04|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51818, - "content": "<|29.06|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51819, - "content": "<|29.08|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51820, - "content": "<|29.10|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51821, - "content": "<|29.12|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51822, - "content": "<|29.14|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51823, - "content": "<|29.16|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51824, - "content": "<|29.18|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51825, - "content": "<|29.20|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51826, - "content": "<|29.22|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51827, - "content": "<|29.24|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": 
false - }, - { - "id": 51828, - "content": "<|29.26|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51829, - "content": "<|29.28|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51830, - "content": "<|29.30|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51831, - "content": "<|29.32|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51832, - "content": "<|29.34|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51833, - "content": "<|29.36|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51834, - "content": "<|29.38|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51835, - "content": "<|29.40|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51836, - "content": "<|29.42|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51837, - "content": "<|29.44|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51838, - "content": "<|29.46|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51839, - "content": "<|29.48|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51840, - "content": "<|29.50|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51841, - "content": "<|29.52|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51842, - "content": "<|29.54|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51843, - "content": "<|29.56|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51844, - "content": "<|29.58|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51845, - "content": "<|29.60|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51846, - "content": "<|29.62|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51847, - "content": "<|29.64|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51848, - "content": "<|29.66|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51849, - "content": "<|29.68|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51850, - "content": "<|29.70|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51851, - 
"content": "<|29.72|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51852, - "content": "<|29.74|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51853, - "content": "<|29.76|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51854, - "content": "<|29.78|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51855, - "content": "<|29.80|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51856, - "content": "<|29.82|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51857, - "content": "<|29.84|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51858, - "content": "<|29.86|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51859, - "content": "<|29.88|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51860, - "content": "<|29.90|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51861, - "content": "<|29.92|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51862, - "content": "<|29.94|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51863, - "content": "<|29.96|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51864, - "content": "<|29.98|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - }, - { - "id": 51865, - "content": "<|30.00|>", - "single_word": false, - "lstrip": false, - "rstrip": false, - "normalized": true, - "special": false - } - ], - "normalizer": null, - "pre_tokenizer": { - "type": "ByteLevel", - "add_prefix_space": false, - "trim_offsets": true, - "use_regex": true - }, - "post_processor": { - "type": "TemplateProcessing", - "single": [ - { - "SpecialToken": { - "id": "<|startoftranscript|>", - "type_id": 0 - } - }, - { - "SpecialToken": { - "id": "<|notimestamps|>", - "type_id": 0 - } - }, - { - "Sequence": { - "id": "A", - "type_id": 0 - } - }, - { - "SpecialToken": { - "id": "<|endoftext|>", - "type_id": 0 - } - } - ], - "pair": [ - { - "SpecialToken": { - "id": "<|startoftranscript|>", - "type_id": 0 - } - }, - { - "SpecialToken": { - "id": "<|notimestamps|>", - "type_id": 0 - } - }, - { - "Sequence": { - "id": "A", - "type_id": 0 - } - }, - { - "Sequence": { - "id": "B", - "type_id": 1 - } - }, - { - "SpecialToken": { - "id": "<|endoftext|>", - "type_id": 1 - } - } - ], - "special_tokens": { - "<|endoftext|>": { - "id": "<|endoftext|>", - "ids": [ - 50257 - ], - "tokens": [ - "<|endoftext|>" - ] - }, - "<|notimestamps|>": { - "id": "<|notimestamps|>", - "ids": [ - 50364 - ], - "tokens": [ - "<|notimestamps|>" - ] - }, - "<|startoftranscript|>": { - "id": "<|startoftranscript|>", - "ids": [ - 50258 - ], - "tokens": [ - "<|startoftranscript|>" - ] - } - } - }, - "decoder": { - 
"type": "ByteLevel", - "add_prefix_space": true, - "trim_offsets": true, - "use_regex": true - }, - "model": { - "type": "BPE", - "dropout": null, - "unk_token": null, - "continuing_subword_prefix": "", - "end_of_word_suffix": "", - "fuse_unk": false, - "byte_fallback": false, - "vocab": { - "!": 0, - "\"": 1, - "#": 2, - "$": 3, - "%": 4, - "&": 5, - "'": 6, - "(": 7, - ")": 8, - "*": 9, - "+": 10, - ",": 11, - "-": 12, - ".": 13, - "/": 14, - "0": 15, - "1": 16, - "2": 17, - "3": 18, - "4": 19, - "5": 20, - "6": 21, - "7": 22, - "8": 23, - "9": 24, - ":": 25, - ";": 26, - "<": 27, - "=": 28, - ">": 29, - "?": 30, - "@": 31, - "A": 32, - "B": 33, - "C": 34, - "D": 35, - "E": 36, - "F": 37, - "G": 38, - "H": 39, - "I": 40, - "J": 41, - "K": 42, - "L": 43, - "M": 44, - "N": 45, - "O": 46, - "P": 47, - "Q": 48, - "R": 49, - "S": 50, - "T": 51, - "U": 52, - "V": 53, - "W": 54, - "X": 55, - "Y": 56, - "Z": 57, - "[": 58, - "\\": 59, - "]": 60, - "^": 61, - "_": 62, - "`": 63, - "a": 64, - "b": 65, - "c": 66, - "d": 67, - "e": 68, - "f": 69, - "g": 70, - "h": 71, - "i": 72, - "j": 73, - "k": 74, - "l": 75, - "m": 76, - "n": 77, - "o": 78, - "p": 79, - "q": 80, - "r": 81, - "s": 82, - "t": 83, - "u": 84, - "v": 85, - "w": 86, - "x": 87, - "y": 88, - "z": 89, - "{": 90, - "|": 91, - "}": 92, - "~": 93, - "¡": 94, - "¢": 95, - "£": 96, - "¤": 97, - "¥": 98, - "¦": 99, - "§": 100, - "¨": 101, - "©": 102, - "ª": 103, - "«": 104, - "¬": 105, - "®": 106, - "¯": 107, - "°": 108, - "±": 109, - "²": 110, - "³": 111, - "´": 112, - "µ": 113, - "¶": 114, - "·": 115, - "¸": 116, - "¹": 117, - "º": 118, - "»": 119, - "¼": 120, - "½": 121, - "¾": 122, - "¿": 123, - "À": 124, - "Á": 125, - "Â": 126, - "Ã": 127, - "Ä": 128, - "Å": 129, - "Æ": 130, - "Ç": 131, - "È": 132, - "É": 133, - "Ê": 134, - "Ë": 135, - "Ì": 136, - "Í": 137, - "Î": 138, - "Ï": 139, - "Ð": 140, - "Ñ": 141, - "Ò": 142, - "Ó": 143, - "Ô": 144, - "Õ": 145, - "Ö": 146, - "×": 147, - "Ø": 148, - "Ù": 149, - "Ú": 150, - "Û": 151, - "Ü": 152, - "Ý": 153, - "Þ": 154, - "ß": 155, - "à": 156, - "á": 157, - "â": 158, - "ã": 159, - "ä": 160, - "å": 161, - "æ": 162, - "ç": 163, - "è": 164, - "é": 165, - "ê": 166, - "ë": 167, - "ì": 168, - "í": 169, - "î": 170, - "ï": 171, - "ð": 172, - "ñ": 173, - "ò": 174, - "ó": 175, - "ô": 176, - "õ": 177, - "ö": 178, - "÷": 179, - "ø": 180, - "ù": 181, - "ú": 182, - "û": 183, - "ü": 184, - "ý": 185, - "þ": 186, - "ÿ": 187, - "Ā": 188, - "ā": 189, - "Ă": 190, - "ă": 191, - "Ą": 192, - "ą": 193, - "Ć": 194, - "ć": 195, - "Ĉ": 196, - "ĉ": 197, - "Ċ": 198, - "ċ": 199, - "Č": 200, - "č": 201, - "Ď": 202, - "ď": 203, - "Đ": 204, - "đ": 205, - "Ē": 206, - "ē": 207, - "Ĕ": 208, - "ĕ": 209, - "Ė": 210, - "ė": 211, - "Ę": 212, - "ę": 213, - "Ě": 214, - "ě": 215, - "Ĝ": 216, - "ĝ": 217, - "Ğ": 218, - "ğ": 219, - "Ġ": 220, - "ġ": 221, - "Ģ": 222, - "ģ": 223, - "Ĥ": 224, - "ĥ": 225, - "Ħ": 226, - "ħ": 227, - "Ĩ": 228, - "ĩ": 229, - "Ī": 230, - "ī": 231, - "Ĭ": 232, - "ĭ": 233, - "Į": 234, - "į": 235, - "İ": 236, - "ı": 237, - "IJ": 238, - "ij": 239, - "Ĵ": 240, - "ĵ": 241, - "Ķ": 242, - "ķ": 243, - "ĸ": 244, - "Ĺ": 245, - "ĺ": 246, - "Ļ": 247, - "ļ": 248, - "Ľ": 249, - "ľ": 250, - "Ŀ": 251, - "ŀ": 252, - "Ł": 253, - "ł": 254, - "Ń": 255, - "Ġt": 256, - "Ġa": 257, - "Ġth": 258, - "in": 259, - "er": 260, - "Ġw": 261, - "Ġs": 262, - "ou": 263, - "Ġthe": 264, - "re": 265, - "on": 266, - "at": 267, - "en": 268, - "Ġc": 269, - "it": 270, - "is": 271, - "Ġb": 272, - "nd": 273, - "Ġd": 274, - "Ġm": 275, - "Ġh": 276, - "Ġo": 277, - "ing": 
278, - "es": 279, - "Ġp": 280, - "Ġto": 281, - "an": 282, - "Ġf": 283, - "or": 284, - "ll": 285, - "ĠI": 286, - "Ġl": 287, - "Ġy": 288, - "ar": 289, - "Ġg": 290, - "Ġyou": 291, - "ed": 292, - "Ġand": 293, - "Ġin": 294, - "Ġof": 295, - "as": 296, - "Ġn": 297, - "om": 298, - "ic": 299, - "Ġthat": 300, - "us": 301, - "et": 302, - "ve": 303, - "al": 304, - "ow": 305, - "le": 306, - "Ġis": 307, - "Ġe": 308, - "Ġit": 309, - "ot": 310, - "'s": 311, - "Ġbe": 312, - "ion": 313, - "ĠT": 314, - "Ġwh": 315, - "ĠA": 316, - "ent": 317, - "ĠS": 318, - "Ġre": 319, - "ay": 320, - "Ġwe": 321, - "Ġon": 322, - "ere": 323, - "Ġha": 324, - "ut": 325, - "ac": 326, - "id": 327, - "ig": 328, - "os": 329, - "ke": 330, - "ver": 331, - "im": 332, - "ĠÐ": 333, - "ĠTh": 334, - "am": 335, - "all": 336, - "Ġfor": 337, - "el": 338, - "ch": 339, - "ro": 340, - "Ġthis": 341, - "Ġst": 342, - "ĠW": 343, - "Ġu": 344, - "ad": 345, - "out": 346, - "ir": 347, - "ld": 348, - "ct": 349, - "Ġk": 350, - "if": 351, - "Ġgo": 352, - "..": 353, - "о": 354, - "ith": 355, - "ly": 356, - "ht": 357, - "qu": 358, - "Ġ-": 359, - "Ġdo": 360, - "Ġj": 361, - "Ġhave": 362, - "ĠB": 363, - "Ġan": 364, - "Ġwith": 365, - "Ġare": 366, - "Ġr": 367, - "Ġde": 368, - "Ġse": 369, - "Ġso": 370, - "Ġv": 371, - "st": 372, - "ill": 373, - "ur": 374, - "Ġli": 375, - "ĠM": 376, - "est": 377, - "od": 378, - "ally": 379, - "'t": 380, - "ust": 381, - "Ġas": 382, - "ĠC": 383, - "ce": 384, - "Ġme": 385, - "а": 386, - "е": 387, - "il": 388, - "ĠH": 389, - "Ġwas": 390, - "ter": 391, - "th": 392, - "Ġcan": 393, - "ant": 394, - "Ġcom": 395, - "our": 396, - "ight": 397, - "ĠY": 398, - "ation": 399, - "ĠAnd": 400, - "ol": 401, - "Ġsh": 402, - "ÑĤ": 403, - "op": 404, - "se": 405, - "Ġnot": 406, - "ĠSo": 407, - "Ġne": 408, - "un": 409, - "Ġab": 410, - "Ġlike": 411, - "Ġat": 412, - "ĠD": 413, - "ie": 414, - "Ġhe": 415, - "Ġcon": 416, - "Ġch": 417, - "ore": 418, - "Ġal": 419, - "Ġor": 420, - "Ġqu": 421, - "ĠO": 422, - "ome": 423, - "ra": 424, - "ul": 425, - "ĠN": 426, - "pp": 427, - "Ġyour": 428, - "ould": 429, - "ĠP": 430, - "Ġfr": 431, - "ge": 432, - "ers": 433, - "'re": 434, - "и": 435, - "Ġthey": 436, - "Ġwhat": 437, - "use": 438, - "Ġall": 439, - "ĠThe": 440, - "ĠL": 441, - "ess": 442, - "em": 443, - "Ġkn": 444, - "Ġjust": 445, - "art": 446, - "Ġpro": 447, - "very": 448, - "um": 449, - "Ġlo": 450, - "Ġì": 451, - "Ġmy": 452, - "ok": 453, - "Ġex": 454, - "ab": 455, - "Ġthere": 456, - "Ġbut": 457, - "Ġknow": 458, - "Ġsu": 459, - "ĠG": 460, - "Ñģ": 461, - "ĠE": 462, - "Ġma": 463, - "оÐ": 464, - "Ġen": 465, - "Ġabout": 466, - "ĠIt": 467, - "ist": 468, - "Ġwor": 469, - "ri": 470, - "ind": 471, - "Ġone": 472, - "ate": 473, - "and": 474, - "ink": 475, - "Ġle": 476, - "ort": 477, - "'m": 478, - "ĠF": 479, - "ich": 480, - "ÑĢ": 481, - "ide": 482, - "Ġget": 483, - "Ġout": 484, - "...": 485, - "Ġwill": 486, - "ãģ": 487, - "ive": 488, - "н": 489, - "Ġfrom": 490, - "ain": 491, - "ĠWe": 492, - "Ġup": 493, - "pe": 494, - "res": 495, - "ca": 496, - "ĠR": 497, - "Ġif": 498, - "Ġpl": 499, - "Ġdon": 500, - "ack": 501, - "Ġ1": 502, - "Ġ\"": 503, - "Ġtr": 504, - "Ġus": 505, - "ĠWh": 506, - "ity": 507, - "ĠJ": 508, - "ĠYou": 509, - "Ġhere": 510, - "her": 511, - "Ġsome": 512, - "oug": 513, - "ak": 514, - "ard": 515, - "Ġgoing": 516, - "Ġun": 517, - "ment": 518, - "Ġthink": 519, - "Ġpe": 520, - "end": 521, - "Ġ(": 522, - "cause": 523, - "Ġtim": 524, - "ast": 525, - "é": 526, - "Ġour": 527, - "Ġwant": 528, - "ame": 529, - "ies": 530, - "Ġë": 531, - "ud": 532, - "ine": 533, - "Ġreally": 534, - 
"Ġte": 535, - "Ġsee": 536, - "ci": 537, - "Ġby": 538, - "so": 539, - "ure": 540, - "ose": 541, - "Ġ[": 542, - "are": 543, - "Ġmore": 544, - "ah": 545, - "one": 546, - "ck": 547, - "ople": 548, - "аÐ": 549, - "Ġthen": 550, - "Ġthing": 551, - "Ġthem": 552, - "ven": 553, - "ound": 554, - "ost": 555, - "ong": 556, - "ect": 557, - "Ġright": 558, - "ag": 559, - "Ġint": 560, - "Ġpeople": 561, - "Ġwhen": 562, - "ous": 563, - "pl": 564, - "Ġtime": 565, - "Ġim": 566, - "Ġwho": 567, - "Ġ2": 568, - "ap": 569, - "Ġbecause": 570, - "hing": 571, - "Ġno": 572, - "ice": 573, - "Ġlook": 574, - "Ġhas": 575, - "Ġwould": 576, - "Ġhow": 577, - "act": 578, - "Ġfe": 579, - "nt": 580, - "ough": 581, - "Ġpr": 582, - "ĠBut": 583, - "Ġsay": 584, - "Ñĥ": 585, - "Ġnow": 586, - "Ġman": 587, - "Ġvery": 588, - "Ġwork": 589, - "iz": 590, - "ĠK": 591, - "iv": 592, - "itt": 593, - "Ġar": 594, - "ep": 595, - "Ġcl": 596, - "Ġwhich": 597, - "Ġco": 598, - "ans": 599, - "'ve": 600, - "Ġsa": 601, - "ff": 602, - "'ll": 603, - "Ġany": 604, - "Ġact": 605, - "Ġye": 606, - "ber": 607, - "ach": 608, - "age": 609, - "per": 610, - "Ġalso": 611, - "fer": 612, - "Ġthese": 613, - "Ġad": 614, - "еÐ": 615, - "ther": 616, - "ace": 617, - "ick": 618, - "ake": 619, - "reat": 620, - "ire": 621, - "ue": 622, - "Ġag": 623, - "ĠU": 624, - "uch": 625, - "ions": 626, - "ry": 627, - "00": 628, - "na": 629, - "Ġdid": 630, - "Ġque": 631, - "Ġhad": 632, - "Ġevery": 633, - "ĠHe": 634, - "Ġla": 635, - "Ġway": 636, - "Ġsp": 637, - "ble": 638, - "ĠThis": 639, - "ass": 640, - "Ġtheir": 641, - "ite": 642, - "Ġneed": 643, - "Ġpart": 644, - "Ġwere": 645, - "Ġback": 646, - "ip": 647, - "own": 648, - "omet": 649, - "be": 650, - "ase": 651, - "Ġmake": 652, - "irst": 653, - "ia": 654, - "ence": 655, - "ang": 656, - "ank": 657, - "Ġgot": 658, - "Ġpre": 659, - "Ġcont": 660, - "Ġother": 661, - "pt": 662, - "ĠThat": 663, - "og": 664, - "Ġgood": 665, - "Ġinto": 666, - "alk": 667, - "Ġbeen": 668, - "Ġam": 669, - "Ġover": 670, - "ually": 671, - "Ġâ": 672, - "ìĿ": 673, - "Ġund": 674, - "he": 675, - "way": 676, - "Ġgr": 677, - "ÑĮ": 678, - "Ġdif": 679, - "Ġper": 680, - "Ñı": 681, - "ĠIn": 682, - "Ġtw": 683, - "ond": 684, - "ars": 685, - "int": 686, - "orm": 687, - "Ġlot": 688, - "Ġwhere": 689, - "ĠÃ": 690, - "ĠV": 691, - "Ġsomet": 692, - "л": 693, - "ens": 694, - "Ġgu": 695, - "Ġac": 696, - "ug": 697, - "Ñĭ": 698, - "ı": 699, - "Ġfirst": 700, - "ree": 701, - "Ġhis": 702, - "ittle": 703, - "Ġimp": 704, - "Ġmo": 705, - "av": 706, - "Ġlittle": 707, - "ĠWhat": 708, - "Ġmuch": 709, - "Ġz": 710, - "Ġê": 711, - "able": 712, - "Ġп": 713, - "Ġpo": 714, - "Ġcomp": 715, - "ne": 716, - "Ġdis": 717, - "Ġlet": 718, - "ance": 719, - "Ġher": 720, - "Ġthings": 721, - "Ġstart": 722, - "ult": 723, - "Ġapp": 724, - "Ġres": 725, - "Ġfo": 726, - "Ġcould": 727, - "Ġinter": 728, - "Ġthose": 729, - "Ġdes": 730, - "Ġwell": 731, - "Ġtwo": 732, - "Ġkind": 733, - "xt": 734, - "ress": 735, - "ely": 736, - "ä": 737, - "Ġbr": 738, - "Ġthr": 739, - "Ġв": 740, - "Ġi": 741, - "ish": 742, - "Ġdiffer": 743, - "Ġro": 744, - "ĠSt": 745, - "Ġsomething": 746, - "Ġtake": 747, - "Ġbo": 748, - "ys": 749, - "Ġshe": 750, - "Ġtalk": 751, - "lo": 752, - "Ñĩ": 753, - "Ġeven": 754, - "к": 755, - "ãĢ": 756, - "Ġн": 757, - "Ġbu": 758, - "ĠIf": 759, - "Ġdown": 760, - "ĠCh": 761, - "ade": 762, - "ations": 763, - "Ġuse": 764, - "ord": 765, - "Ġoff": 766, - "Ġactually": 767, - "Ġspe": 768, - "du": 769, - "ated": 770, - "ater": 771, - "oss": 772, - "ning": 773, - "ü": 774, - "Ġdoes": 775, - "ĠÑģ": 776, - "Ġnew": 777, - "Ġbet": 778, 
- "vel": 779, - "cess": 780, - "ple": 781, - "Ġhapp": 782, - "ting": 783, - "onna": 784, - "Ġes": 785, - "Ġday": 786, - "Ġonly": 787, - "ign": 788, - "kay": 789, - "sel": 790, - "ents": 791, - "ount": 792, - "ild": 793, - "ile": 794, - "Ġsc": 795, - "Ġhim": 796, - "Ġagain": 797, - "ving": 798, - "Ġgonna": 799, - "Ġcomm": 800, - "Ġhel": 801, - "other": 802, - "Ġke": 803, - "ical": 804, - "Ġ3": 805, - "Ġel": 806, - "Ġthrough": 807, - "Ġcome": 808, - "ark": 809, - "day": 810, - "ier": 811, - "ó": 812, - "Ġthan": 813, - "ĠThey": 814, - "Ġmay": 815, - "Ġser": 816, - "íķ": 817, - "Ġcall": 818, - "Ġdifferent": 819, - "Ġshould": 820, - "ĠThere": 821, - "ary": 822, - "ĠNow": 823, - "ãĤ": 824, - "thing": 825, - "we": 826, - "ory": 827, - "fter": 828, - "Ġput": 829, - "ors": 830, - "ial": 831, - "ëĭ": 832, - "Ġunder": 833, - "Ġinc": 834, - "ĠYe": 835, - "ub": 836, - "form": 837, - "Ġvide": 838, - "à¸": 839, - "vers": 840, - "Ġfeel": 841, - "á": 842, - "ody": 843, - "ft": 844, - "fore": 845, - "Ġem": 846, - "get": 847, - "Ġsaid": 848, - "ition": 849, - "Ġrec": 850, - "ious": 851, - "atch": 852, - "Ġtry": 853, - "Ġhelp": 854, - "Ġshow": 855, - "д": 856, - "Ġbit": 857, - "ull": 858, - "в": 859, - "ÑĤо": 860, - "gr": 861, - "Ġplay": 862, - "ife": 863, - "ail": 864, - "ĠYeah": 865, - "Ġquest": 866, - "Ġmany": 867, - "Ġpers": 868, - "Ġgreat": 869, - "ÃŃ": 870, - "Ġest": 871, - "ng": 872, - "ĠâĻ": 873, - "ty": 874, - "la": 875, - "ĠOh": 876, - "Ġ×": 877, - "à®": 878, - "ĠBe": 879, - "ady": 880, - "Ġmost": 881, - "ction": 882, - "ĠNo": 883, - "Ġdoing": 884, - "Ġbeing": 885, - "Ġtoo": 886, - "ces": 887, - "Ġbl": 888, - ".\"": 889, - "Ġrem": 890, - "iss": 891, - "ons": 892, - ">>": 893, - "ru": 894, - "wn": 895, - "ont": 896, - "ib": 897, - "ell": 898, - "Ġsm": 899, - "oth": 900, - "ual": 901, - "Ġ>>": 902, - "Ġph": 903, - "les": 904, - "oc": 905, - "ful": 906, - "Ġsec": 907, - "ise": 908, - "Ġadd": 909, - "igh": 910, - "ert": 911, - "Ġsame": 912, - "âĢ": 913, - "Ġmean": 914, - "Ġfind": 915, - "ek": 916, - "Ġend": 917, - "--": 918, - "м": 919, - "Ġstill": 920, - "az": 921, - "Ġ'": 922, - "Ġmin": 923, - "Ġyears": 924, - "urn": 925, - "Ġaround": 926, - "self": 927, - "Ġwr": 928, - "bs": 929, - "ought": 930, - "ĠâĻª": 931, - "Ġfl": 932, - "ange": 933, - "Ġafter": 934, - "Ġpoint": 935, - "mer": 936, - "ved": 937, - "Ġlong": 938, - "oy": 939, - "ä¸": 940, - "Ġcr": 941, - "ways": 942, - "Ġsy": 943, - "Ġtra": 944, - "Ġ20": 945, - "ave": 946, - "Ġche": 947, - "Ġent": 948, - "Ġbefore": 949, - "ph": 950, - "Ġatt": 951, - "ian": 952, - "ily": 953, - "Ġperson": 954, - "Ġbig": 955, - "Ġsch": 956, - "Ġreal": 957, - "Ġnext": 958, - "Ġlove": 959, - "Ġvideo": 960, - "ĠLet": 961, - "Ġfin": 962, - "Ġmak": 963, - "ible": 964, - "Ġtoday": 965, - "erm": 966, - "ĠAl": 967, - "ower": 968, - "ann": 969, - "ix": 970, - "Ġpar": 971, - "Ġstud": 972, - "ö": 973, - "Ġimport": 974, - "te": 975, - "Ġgive": 976, - "ves": 977, - "Ġdie": 978, - "Ġdec": 979, - "Ġtell": 980, - "Ġк": 981, - "ÑģÑĤ": 982, - "Ġwhy": 983, - "ically": 984, - "ict": 985, - "red": 986, - "Ġbas": 987, - "Ġsure": 988, - "Ġbel": 989, - "ating": 990, - "Ġtak": 991, - "Ġset": 992, - "Ġlife": 993, - "Ġdidn": 994, - "ا": 995, - "ob": 996, - "und": 997, - "ath": 998, - "Ġop": 999, - "Ġо": 1000, - "ait": 1001, - "Ġworld": 1002, - "Ġsupp": 1003, - "io": 1004, - "Ġcour": 1005, - "Ġи": 1006, - "ward": 1007, - "ен": 1008, - "Ġalways": 1009, - "up": 1010, - "Ġhand": 1011, - "ĠHow": 1012, - "cial": 1013, - "Ġcons": 1014, - "ĠÑ": 1015, - "Ġind": 1016, - "Ġ4": 1017, - "ĠAs": 1018, - 
"Ġfun": 1019, - "ject": 1020, - "Ġimportant": 1021, - "Ġsur": 1022, - "ew": 1023, - "ates": 1024, - "Ġ5": 1025, - "Ġdi": 1026, - "Ġmade": 1027, - "Ġins": 1028, - "Ġask": 1029, - "Ġet": 1030, - "Ġnum": 1031, - "Ġcar": 1032, - "ĠOkay": 1033, - "Ġsim": 1034, - "ik": 1035, - "Ġlast": 1036, - "ĠGo": 1037, - "Ġmus": 1038, - "Ġrel": 1039, - "ular": 1040, - "´ì": 1041, - "ĠWell": 1042, - "pect": 1043, - "ĠThank": 1044, - "Ġthree": 1045, - "ã": 1046, - "ãĥ": 1047, - "Ġinv": 1048, - "Ġgen": 1049, - "lic": 1050, - "Ġhappen": 1051, - "ëĬ": 1052, - "ien": 1053, - "ever": 1054, - "ов": 1055, - "Ġstr": 1056, - "ĠAll": 1057, - "Ġinst": 1058, - "ĠâĢ": 1059, - "Ġdef": 1060, - "Ġsl": 1061, - "Ġmight": 1062, - "ung": 1063, - "Ġyear": 1064, - "Ġown": 1065, - "Ġkeep": 1066, - "body": 1067, - "der": 1068, - "ĠÑĤ": 1069, - "Ġд": 1070, - "Ġanother": 1071, - "Ġmod": 1072, - "Ġev": 1073, - "Ġguys": 1074, - "Ġable": 1075, - "ão": 1076, - "que": 1077, - "ident": 1078, - "ĠYes": 1079, - "Ġits": 1080, - "Ġplace": 1081, - "Ġprodu": 1082, - "arn": 1083, - "Ġм": 1084, - "Ġrep": 1085, - "Ġexper": 1086, - "Ġfam": 1087, - "ities": 1088, - "ific": 1089, - "Ġhigh": 1090, - "ied": 1091, - "ool": 1092, - "iew": 1093, - "еÑĤ": 1094, - "ren": 1095, - "Ġdone": 1096, - "Ġ...": 1097, - "ëĬĶ": 1098, - "stem": 1099, - "ĠSe": 1100, - "Ġbetter": 1101, - "come": 1102, - "Ġdel": 1103, - "Ġty": 1104, - "Ġum": 1105, - "Ġho": 1106, - "ĠAn": 1107, - "Ġmon": 1108, - "ings": 1109, - "Ġsk": 1110, - "Ġob": 1111, - "com": 1112, - "blem": 1113, - "ope": 1114, - "stand": 1115, - "'d": 1116, - "ments": 1117, - "Ġele": 1118, - "ĠIs": 1119, - "Ġda": 1120, - "Ġreg": 1121, - "lease": 1122, - "ike": 1123, - "als": 1124, - "ize": 1125, - "ê°": 1126, - "Ġcare": 1127, - "Ġnever": 1128, - "ìĿ´": 1129, - "ese": 1130, - "Ġmet": 1131, - "olog": 1132, - "ĠWhen": 1133, - "uck": 1134, - "еÑĢ": 1135, - "Ġé": 1136, - "Ġdat": 1137, - "ç": 1138, - "Ġexam": 1139, - "ility": 1140, - "Ġdet": 1141, - "cri": 1142, - "Ġused": 1143, - "ĠDo": 1144, - "Ġtrans": 1145, - "eg": 1146, - "ten": 1147, - "Ñİ": 1148, - "cus": 1149, - "Ġsecond": 1150, - "Ġbest": 1151, - "Ġhard": 1152, - "Ġide": 1153, - "Ġproblem": 1154, - "ê³": 1155, - "ĠUn": 1156, - "Ñħ": 1157, - "ĠÎ": 1158, - "Ġwatch": 1159, - "ĠSh": 1160, - "atter": 1161, - "Ġpret": 1162, - "Ġder": 1163, - "Ġcourse": 1164, - "ÅŁ": 1165, - "ative": 1166, - "ics": 1167, - "Ġquestion": 1168, - "ute": 1169, - "ìĹ": 1170, - "ĠFor": 1171, - "ather": 1172, - "Ġcol": 1173, - "iend": 1174, - "Ġí": 1175, - "ĠZ": 1176, - "Ġdoesn": 1177, - "arch": 1178, - "Ġinterest": 1179, - "Ġpol": 1180, - "Ġcor": 1181, - "ience": 1182, - "Ġpres": 1183, - "Ġeach": 1184, - "Ġsystem": 1185, - "Ġfact": 1186, - "iel": 1187, - "ably": 1188, - "Ġer": 1189, - "Ġrun": 1190, - "ĠìĿ": 1191, - "Ġtop": 1192, - "ner": 1193, - "Ġthought": 1194, - "Ġeas": 1195, - "ient": 1196, - "Ġcre": 1197, - "ÑĪ": 1198, - "Ġcommun": 1199, - "ye": 1200, - "ready": 1201, - "llow": 1202, - "Ġeverything": 1203, - "omm": 1204, - "Ġmed": 1205, - "ļĶ": 1206, - "Ġcount": 1207, - "its": 1208, - "Ġcompl": 1209, - "hip": 1210, - "ÙĦ": 1211, - "ook": 1212, - "Ġtoget": 1213, - "Ġtogether": 1214, - "amp": 1215, - "Ġgame": 1216, - "Ġalready": 1217, - "ал": 1218, - "Ġcalled": 1219, - "ale": 1220, - "ÅĤ": 1221, - "ĠMy": 1222, - "Ġunderstand": 1223, - "Ġdr": 1224, - "Ġmom": 1225, - "ited": 1226, - "ол": 1227, - "Ġusing": 1228, - "zy": 1229, - "Ġnumber": 1230, - "ãĢģ": 1231, - "ced": 1232, - "Ġcle": 1233, - "но": 1234, - "ëĭ¤": 1235, - "ince": 1236, - "Ġlooking": 1237, - "Ġpretty": 1238, - "Ġprob": 1239, - 
"ĠShe": 1240, - "Ġve": 1241, - "Ġgetting": 1242, - "Ġweek": 1243, - "Ġeff": 1244, - "uff": 1245, - "air": 1246, - "ues": 1247, - "ern": 1248, - "ĠQ": 1249, - "oup": 1250, - "ention": 1251, - "Ġside": 1252, - "ом": 1253, - "Ġform": 1254, - "Ġbus": 1255, - "Ġass": 1256, - "Ġed": 1257, - "ason": 1258, - "ween": 1259, - "âĢ¦": 1260, - "Ġturn": 1261, - "Ġcur": 1262, - "Ġcoll": 1263, - "Ġdire": 1264, - "ĠGod": 1265, - "Ġ10": 1266, - "Ġequ": 1267, - "Ġб": 1268, - "Ġopen": 1269, - "Ġsuch": 1270, - "ird": 1271, - "ак": 1272, - "Ġear": 1273, - "ÄĻ": 1274, - "gan": 1275, - "Ġpartic": 1276, - "Ġfriend": 1277, - "Ġexp": 1278, - "Ġext": 1279, - "Ġhome": 1280, - "Ġwater": 1281, - "ĠOn": 1282, - "ÑĤÑĮ": 1283, - "ork": 1284, - "ĠпÑĢ": 1285, - "Ġmove": 1286, - "ness": 1287, - "ense": 1288, - "ho": 1289, - "Ġchar": 1290, - "co": 1291, - "ins": 1292, - "Ġboth": 1293, - "Ġ19": 1294, - "Ġgra": 1295, - "Ġbetween": 1296, - "á»": 1297, - "Ġìķ": 1298, - "ash": 1299, - "ĠRe": 1300, - "ai": 1301, - "alth": 1302, - "ures": 1303, - "ember": 1304, - "Ġav": 1305, - "Ġver": 1306, - "ê": 1307, - "oney": 1308, - "Ġthank": 1309, - "Ġmaybe": 1310, - "uc": 1311, - "ime": 1312, - "ê³ł": 1313, - "Ġaway": 1314, - "Ġname": 1315, - "ouse": 1316, - "Ġacc": 1317, - "Ġmusic": 1318, - "Ġchange": 1319, - "Ġpass": 1320, - "ger": 1321, - "Ġbuild": 1322, - "Ġval": 1323, - "iness": 1324, - "any": 1325, - "Ġfew": 1326, - "´ë": 1327, - "ta": 1328, - "Ġlist": 1329, - "Ã¥": 1330, - "Ġold": 1331, - "Ġìŀ": 1332, - "Ġsort": 1333, - "Ġmem": 1334, - "Ġca": 1335, - "cept": 1336, - "Ġgener": 1337, - "Ġyeah": 1338, - "Ġwhile": 1339, - "Ġanything": 1340, - "ric": 1341, - "gram": 1342, - "Ġein": 1343, - "cy": 1344, - "uring": 1345, - "ĠDe": 1346, - "Ġpower": 1347, - "Ġcoming": 1348, - "Ġword": 1349, - "Ġ--": 1350, - "Ġbelie": 1351, - "Ġfound": 1352, - "to": 1353, - "п": 1354, - "Ġmeans": 1355, - "Ġinform": 1356, - "ĠØ": 1357, - "ĠÑĩ": 1358, - "Ġsmall": 1359, - "000": 1360, - "Ġcame": 1361, - "Ġíķ": 1362, - "wh": 1363, - "Ġworking": 1364, - "Ġexample": 1365, - "Ġpos": 1366, - "Ġdep": 1367, - "ê²": 1368, - "äº": 1369, - "ote": 1370, - "Ġdem": 1371, - "ì§": 1372, - "ts": 1373, - "Ġvar": 1374, - "aut": 1375, - "Ġtri": 1376, - "chn": 1377, - "Ġhead": 1378, - "Ġwhole": 1379, - "×Ļ": 1380, - "ze": 1381, - "Ġtrying": 1382, - "Ġtem": 1383, - "Ġcou": 1384, - "ets": 1385, - "Ġ6": 1386, - "Ġfil": 1387, - "velop": 1388, - "Ġcase": 1389, - "à¯": 1390, - "Ġprobably": 1391, - "Ġokay": 1392, - "Ġplan": 1393, - "Ġsit": 1394, - "Ġschool": 1395, - "ĠThen": 1396, - "¸ë": 1397, - "me": 1398, - "Ġprocess": 1399, - "Ġfar": 1400, - "Ġread": 1401, - "Ġposs": 1402, - "Ġbre": 1403, - "Ġsol": 1404, - "icht": 1405, - "Ġsupport": 1406, - "ĠTo": 1407, - "ertain": 1408, - "Ġstarted": 1409, - "Ġcap": 1410, - "Ġleft": 1411, - "Ġdata": 1412, - "Ġtimes": 1413, - "ел": 1414, - "Ġwanted": 1415, - "ан": 1416, - "Ġtalking": 1417, - "Ġist": 1418, - "Ġhaving": 1419, - "ump": 1420, - "Ġcontin": 1421, - "Ġsub": 1422, - "Ġз": 1423, - "pr": 1424, - "ëĭĪ": 1425, - "ina": 1426, - "ż": 1427, - "Ġcreat": 1428, - "ode": 1429, - "×ķ": 1430, - "æĺ": 1431, - "!!": 1432, - "Ġterm": 1433, - "ism": 1434, - "од": 1435, - "ĠBecause": 1436, - "Ġwent": 1437, - "ider": 1438, - "Ġprov": 1439, - "Ġchild": 1440, - "Ġden": 1441, - "Ġlight": 1442, - "br": 1443, - "³Ð¾": 1444, - "oh": 1445, - "Ġbook": 1446, - "ĠÙ": 1447, - "ution": 1448, - "ĠJust": 1449, - "ene": 1450, - "Ġfour": 1451, - "Ġvis": 1452, - "ê°Ģ": 1453, - "Ġhope": 1454, - "Ġmaking": 1455, - "ĠLe": 1456, - "ìķ": 1457, - "Ġopp": 1458, - "au": 1459, - "Ġmoney": 
1460, - "Ġprogram": 1461, - "è": 1462, - "Ġstand": 1463, - "IN": 1464, - "Ġsign": 1465, - "Ġlearn": 1466, - "Ãł": 1467, - "ĠDon": 1468, - "Ġteam": 1469, - "Ġна": 1470, - "lud": 1471, - "Ġrest": 1472, - "ices": 1473, - "æľ": 1474, - "ĠÑĢ": 1475, - "Ġaut": 1476, - "Ġlead": 1477, - "ational": 1478, - "de": 1479, - "gy": 1480, - "Ġnice": 1481, - "Ġdas": 1482, - "Ġdist": 1483, - "Ġhum": 1484, - "ĠOne": 1485, - "æĪ": 1486, - "Ġcomes": 1487, - "Ġjo": 1488, - "Ġcent": 1489, - "Ġexpl": 1490, - "Ġmark": 1491, - "reen": 1492, - "led": 1493, - "gin": 1494, - "ìļĶ": 1495, - "Ġlevel": 1496, - "Ġconf": 1497, - "ush": 1498, - "Ġdevelop": 1499, - "Ġtest": 1500, - "eng": 1501, - "vious": 1502, - "ature": 1503, - "ем": 1504, - "ret": 1505, - "Ġje": 1506, - "Ġstuff": 1507, - "Ġclass": 1508, - "ows": 1509, - "Ġê·": 1510, - "Ġsi": 1511, - "Ġles": 1512, - "rop": 1513, - "çļ": 1514, - "Ġpor": 1515, - "Ġwar": 1516, - "ìĹIJ": 1517, - "Ġeveryone": 1518, - "Ġge": 1519, - "Ġcheck": 1520, - "ott": 1521, - "Ġsing": 1522, - "Ġart": 1523, - "Ġfollow": 1524, - "Ġ201": 1525, - "ĠFr": 1526, - "ais": 1527, - "ìĸ": 1528, - "α": 1529, - "å°": 1530, - "ĠÃł": 1531, - "imes": 1532, - "Ġret": 1533, - "Ġchang": 1534, - "Ġpub": 1535, - "Ġinf": 1536, - "Ġtechn": 1537, - "ada": 1538, - "ives": 1539, - "Ġbeh": 1540, - "æĺ¯": 1541, - "Ġlooks": 1542, - "ãĢĤ": 1543, - "з": 1544, - "ĠWhy": 1545, - "çļĦ": 1546, - "Ġenough": 1547, - "Ġbra": 1548, - "itch": 1549, - "ä»": 1550, - "Ġadv": 1551, - "б": 1552, - "Ġwithout": 1553, - "wer": 1554, - "meric": 1555, - "den": 1556, - "Ġcomplet": 1557, - "Ġidea": 1558, - "ters": 1559, - "ock": 1560, - "Ġdefin": 1561, - "Ġever": 1562, - "Ġgl": 1563, - "Ġonce": 1564, - "Ġbring": 1565, - "Ġsaying": 1566, - "Ġans": 1567, - "Ġhear": 1568, - "nect": 1569, - "Ġless": 1570, - "go": 1571, - "ream": 1572, - "ado": 1573, - "ìŀ": 1574, - "Ġmind": 1575, - "ente": 1576, - "Ġfull": 1577, - "Ġbad": 1578, - "Ġwom": 1579, - "Ġsomeone": 1580, - "Ġdu": 1581, - "Ġwon": 1582, - "Ġcontro": 1583, - "ortun": 1584, - "Ġhealth": 1585, - "Ġcho": 1586, - "ĠAr": 1587, - "Ġconc": 1588, - "Ġinformation": 1589, - "Ġstop": 1590, - "att": 1591, - "ately": 1592, - "ä½": 1593, - "Ġgroup": 1594, - "ĠÑĥ": 1595, - "Ġquite": 1596, - "Ġresp": 1597, - "ER": 1598, - "ught": 1599, - "ê¸": 1600, - "man": 1601, - "ized": 1602, - "ĠBr": 1603, - "Ġremember": 1604, - "Ġfamily": 1605, - "Ġbusiness": 1606, - "aw": 1607, - "Ġspec": 1608, - "Ġau": 1609, - "ĠOr": 1610, - "Äħ": 1611, - "Ġseen": 1612, - "Ġlar": 1613, - "Ġ7": 1614, - "gg": 1615, - "bers": 1616, - "Ġdra": 1617, - "Ġmonth": 1618, - "Ġsays": 1619, - "Ġiss": 1620, - "Ġlive": 1621, - "Ġline": 1622, - "Ġmoment": 1623, - "Ġexc": 1624, - "els": 1625, - "Ġsound": 1626, - "Ġcool": 1627, - "Ġloc": 1628, - "Ġcertain": 1629, - "Ġdri": 1630, - "оÑĤ": 1631, - "ames": 1632, - "Ġmust": 1633, - "ny": 1634, - "иÑĤ": 1635, - "Ġkid": 1636, - "Ġinclud": 1637, - "ìĿĦ": 1638, - "ator": 1639, - "ÄŁ": 1640, - "ha": 1641, - "ared": 1642, - "Ġseem": 1643, - "й": 1644, - "ìĦ": 1645, - "Ġelse": 1646, - "Ġìł": 1647, - "irl": 1648, - "Ġ8": 1649, - "Ġvo": 1650, - "Ġquestions": 1651, - "ines": 1652, - "ee": 1653, - "æĪij": 1654, - "ür": 1655, - "ĠAmeric": 1656, - "Ġstory": 1657, - "Ġserv": 1658, - "vern": 1659, - "ages": 1660, - "land": 1661, - "ĠâĢĵ": 1662, - "era": 1663, - "ĠCan": 1664, - "Ġpop": 1665, - "ether": 1666, - "Ġna": 1667, - "Ġorder": 1668, - "Ġmakes": 1669, - "Ġsince": 1670, - "con": 1671, - "ctor": 1672, - "Ġthough": 1673, - "Ġproduct": 1674, - "ли": 1675, - "Ġleg": 1676, - "Ġmeet": 1677, - "alf": 1678, - "ÑģÑı": 
1679, - "unch": 1680, - "iter": 1681, - "ove": 1682, - "×ķ×": 1683, - "iet": 1684, - "ам": 1685, - "ital": 1686, - "Ġsuper": 1687, - "ling": 1688, - "Ġpay": 1689, - "Ġpara": 1690, - "Ġjob": 1691, - "ĠHere": 1692, - "Ġsw": 1693, - "ks": 1694, - "ption": 1695, - "ma": 1696, - "Ġbelieve": 1697, - "¬ë": 1698, - "Ġwait": 1699, - "ой": 1700, - "Ġunt": 1701, - "Ġquick": 1702, - "hr": 1703, - "ĠÑį": 1704, - "ĠPro": 1705, - "Ġmen": 1706, - "à¹": 1707, - "Ġdays": 1708, - "Ġgoes": 1709, - "Ġspeak": 1710, - "ĠAt": 1711, - "ement": 1712, - "Ġmiss": 1713, - "Ġaw": 1714, - "Ġdesign": 1715, - "Ġproject": 1716, - "оÑĢ": 1717, - "ij": 1718, - "ants": 1719, - "ats": 1720, - "ĠChr": 1721, - "Ġ9": 1722, - "Ġcut": 1723, - "Ġrequ": 1724, - "Ġне": 1725, - "ĠNot": 1726, - "aster": 1727, - "Ġmill": 1728, - "Ġparticular": 1729, - "Ġpie": 1730, - "Ġstudents": 1731, - "Ġfive": 1732, - "oun": 1733, - "ĠNe": 1734, - "Ġgi": 1735, - "Ġpas": 1736, - "Ġfree": 1737, - "ĠSp": 1738, - "lich": 1739, - "Ġprof": 1740, - "Ġeng": 1741, - "Ġprot": 1742, - "ĠLike": 1743, - "osed": 1744, - "Ġconnect": 1745, - "app": 1746, - "Ġë§": 1747, - "iting": 1748, - "Ġblo": 1749, - "Ġlos": 1750, - "ists": 1751, - "Ġexperience": 1752, - "rent": 1753, - "Ġstay": 1754, - "Ġfood": 1755, - "ton": 1756, - "ruct": 1757, - "Ġhist": 1758, - "view": 1759, - "ining": 1760, - "most": 1761, - "ivers": 1762, - "bo": 1763, - "ãģĦ": 1764, - "ĠTr": 1765, - "gen": 1766, - "Ġplease": 1767, - "Ġcommunity": 1768, - "Ġce": 1769, - "AN": 1770, - "no": 1771, - "Ġbody": 1772, - "Ġhour": 1773, - "Ġvers": 1774, - "áº": 1775, - "cer": 1776, - "Ġê°": 1777, - "Ġreason": 1778, - "ĠRight": 1779, - "Ġlater": 1780, - "ÏĦ": 1781, - "Ġhouse": 1782, - "ĠX": 1783, - "он": 1784, - "Ġstate": 1785, - "fic": 1786, - "å¤": 1787, - "ÅĽ": 1788, - "ield": 1789, - "Ġpri": 1790, - "Ġpast": 1791, - "Ġwalk": 1792, - "ology": 1793, - "ering": 1794, - "anna": 1795, - "Ġter": 1796, - "Ġhold": 1797, - "Ġorgan": 1798, - "ben": 1799, - "ο": 1800, - "ón": 1801, - "Ġeffect": 1802, - "Ġyourself": 1803, - "Ġplus": 1804, - "aj": 1805, - "ando": 1806, - "ural": 1807, - "Ġroom": 1808, - "lect": 1809, - "ê²Į": 1810, - "?\"": 1811, - "side": 1812, - "Ġbecome": 1813, - "ÑĨ": 1814, - "ĠÂ": 1815, - "ood": 1816, - "Ġconst": 1817, - "Ġnight": 1818, - "utes": 1819, - "ж": 1820, - "Ġbreak": 1821, - "Ġpain": 1822, - "Ġstep": 1823, - "ired": 1824, - "Ġnothing": 1825, - "Ġuntil": 1826, - "Ñĸ": 1827, - "ав": 1828, - "ÙĬ": 1829, - "Ġduring": 1830, - "ì§Ģ": 1831, - "less": 1832, - "oll": 1833, - "нÑĭ": 1834, - "ι": 1835, - "fect": 1836, - "iver": 1837, - "ıĦ": 1838, - "ither": 1839, - "ying": 1840, - "Ġbegin": 1841, - "×Ļ×": 1842, - "ivid": 1843, - "Ġç": 1844, - "Ġsal": 1845, - "Ġta": 1846, - "Ġpot": 1847, - "Ġ$": 1848, - "Ġmar": 1849, - "Ġclear": 1850, - "Ġface": 1851, - "Ġgrow": 1852, - "Ġ*": 1853, - "Ġinside": 1854, - "Ġfriends": 1855, - "Ġleave": 1856, - "enn": 1857, - "Ġeasy": 1858, - "Ġarea": 1859, - "ality": 1860, - "oud": 1861, - "Ġeat": 1862, - "ÙĨ": 1863, - "Ġpur": 1864, - "orn": 1865, - "Ġsaw": 1866, - "Ġanswer": 1867, - "Ġfront": 1868, - "Ġbeaut": 1869, - "¼ë": 1870, - "Ġmatter": 1871, - "Ġson": 1872, - "ĠNew": 1873, - "Ġresult": 1874, - "ides": 1875, - "che": 1876, - "Ġfut": 1877, - "ps": 1878, - "Ġfocus": 1879, - "Ġinteresting": 1880, - "å¥": 1881, - "Ġap": 1882, - "\".": 1883, - "Ġcreate": 1884, - "оÑģ": 1885, - "Ġpress": 1886, - "ross": 1887, - "Ġpick": 1888, - "line": 1889, - "Ġtook": 1890, - "ĠMay": 1891, - "row": 1892, - "Ġich": 1893, - "ĺë": 1894, - "Ġref": 1895, - "Ġmor": 1896, - "ract": 1897, - 
"arent": 1898, - "AR": 1899, - "Ġexact": 1900, - "Ġspace": 1901, - "work": 1902, - "ни": 1903, - "Ġbir": 1904, - "Ġdev": 1905, - "г": 1906, - "Ġtold": 1907, - "Ġpublic": 1908, - "cially": 1909, - "Ġview": 1910, - "ĠHey": 1911, - "med": 1912, - "llo": 1913, - "cc": 1914, - "Ġfac": 1915, - "Ġcouple": 1916, - "Ġheart": 1917, - "ler": 1918, - "Ġready": 1919, - "Ġalmost": 1920, - "aring": 1921, - "Ġhalf": 1922, - "ĠMe": 1923, - "avor": 1924, - "ique": 1925, - "Ġcharac": 1926, - "Ġpract": 1927, - "ON": 1928, - "ane": 1929, - "Ġil": 1930, - "на": 1931, - "Ġvi": 1932, - "lish": 1933, - "head": 1934, - "Ġleast": 1935, - "Ġbasically": 1936, - "ased": 1937, - "right": 1938, - "Ġyet": 1939, - "Ġtaking": 1940, - "Ġcountry": 1941, - "Ġwin": 1942, - "Ġisn": 1943, - "Ġpossible": 1944, - "Ġcam": 1945, - "Ġincre": 1946, - "Ġpat": 1947, - "Ġwanna": 1948, - "Ġconsider": 1949, - "Ġabs": 1950, - "Ġwithin": 1951, - "Ġhuman": 1952, - "Ġthinking": 1953, - "Ġoh": 1954, - "¡ľ": 1955, - "Ġqui": 1956, - "ases": 1957, - "Ġ0": 1958, - "itely": 1959, - "ä¸į": 1960, - "Ġkill": 1961, - "Ġmil": 1962, - "Ġinvest": 1963, - "ister": 1964, - "Ġsuc": 1965, - "ional": 1966, - "elf": 1967, - "Ġwhether": 1968, - "Ġcontrol": 1969, - "Ġagainst": 1970, - "ots": 1971, - "ëĭĪëĭ¤": 1972, - "ior": 1973, - "Ġpresent": 1974, - "Ġا": 1975, - "Ġwatching": 1976, - "ube": 1977, - "erv": 1978, - "Ġnicht": 1979, - "Ġgovern": 1980, - "ĠThese": 1981, - "Ġ:": 1982, - "uit": 1983, - "ugh": 1984, - "Ġworks": 1985, - "oo": 1986, - "Ġwir": 1987, - "Ġair": 1988, - "ĠTe": 1989, - "аз": 1990, - "ision": 1991, - "where": 1992, - "Ġtot": 1993, - "joy": 1994, - "ìĭ": 1995, - "Ġvol": 1996, - "Ġе": 1997, - "Ġclose": 1998, - "ĠAd": 1999, - "Ñī": 2000, - "ined": 2001, - "Ġuna": 2002, - "Ġê·¸ë": 2003, - "°ë": 2004, - "orry": 2005, - "Ġbro": 2006, - "Ġfilm": 2007, - "ift": 2008, - "20": 2009, - "Ġtype": 2010, - "Ġhappened": 2011, - "ĠAm": 2012, - "Ġgirl": 2013, - "ĠAre": 2014, - "wards": 2015, - "Ġpour": 2016, - "Ġcolor": 2017, - "elt": 2018, - "аÑģ": 2019, - "Ġsense": 2020, - "lex": 2021, - "ĠWith": 2022, - "uss": 2023, - "rib": 2024, - "Ġrese": 2025, - "Ġnorm": 2026, - "Ġfuture": 2027, - "Ġdeal": 2028, - "ending": 2029, - "ey": 2030, - "Ġx": 2031, - "ero": 2032, - "ĠCl": 2033, - "uk": 2034, - "Ġwhatever": 2035, - "selves": 2036, - "Ġyoung": 2037, - "ìĬ": 2038, - "ĠMar": 2039, - "ĠChrist": 2040, - "Ġguess": 2041, - "Ġperform": 2042, - "Ġener": 2043, - "ron": 2044, - "Ġhit": 2045, - "Ġwond": 2046, - "Ġdirect": 2047, - "ĠEvery": 2048, - "Ġoften": 2049, - "Ġfa": 2050, - "Ġalong": 2051, - "Ġclick": 2052, - "ĠLook": 2053, - "Ġsitu": 2054, - "Ġhappy": 2055, - "ead": 2056, - "Ġago": 2057, - "Ġenc": 2058, - "Ġmyself": 2059, - "Ġcover": 2060, - "об": 2061, - "Ġmid": 2062, - "Ġcost": 2063, - "Ġten": 2064, - "ĠSch": 2065, - "Ġexpect": 2066, - "Ġwasn": 2067, - "Ġstrong": 2068, - "iful": 2069, - "Ġopportun": 2070, - "inal": 2071, - "yle": 2072, - "Ġshare": 2073, - "Ġtrue": 2074, - "Ġappro": 2075, - "Ġchall": 2076, - "Ġminutes": 2077, - "Ġchann": 2078, - "ĠëĤ": 2079, - "ε": 2080, - "li": 2081, - "Ġmess": 2082, - "ories": 2083, - "pecially": 2084, - "Ġwrong": 2085, - "Ġyes": 2086, - "ĠìĹ": 2087, - "iron": 2088, - "Ġallow": 2089, - "Ġsubs": 2090, - "Ġfore": 2091, - "Ġfight": 2092, - "Ġsocial": 2093, - "Ġcra": 2094, - "ana": 2095, - "Ġaff": 2096, - "Ġess": 2097, - "Ġways": 2098, - "Ġshort": 2099, - "Ġfall": 2100, - "Ġlaw": 2101, - "ĠWho": 2102, - "Ġenjoy": 2103, - "Ġcal": 2104, - "Ġaccess": 2105, - "fe": 2106, - "Ġnon": 2107, - "Ġacross": 2108, - "ery": 2109, - "viously": 2110, - 
"ĠEx": 2111, - "ided": 2112, - "Ġlink": 2113, - "ĠPr": 2114, - "Ġterms": 2115, - "aces": 2116, - "Ġland": 2117, - "azing": 2118, - "Ġ15": 2119, - "Ġmult": 2120, - "Ġspecial": 2121, - "åĢ": 2122, - "iving": 2123, - "ìĿĢ": 2124, - "Ġtyp": 2125, - "Ġste": 2126, - "ĠÄ": 2127, - "Ġforward": 2128, - "åı": 2129, - "Ġfre": 2130, - "好": 2131, - "Ġresearch": 2132, - "à¯į": 2133, - "аÑĤ": 2134, - "Ġmain": 2135, - "Ġrecord": 2136, - "Ġhu": 2137, - "Ġdefinitely": 2138, - "Ġeither": 2139, - "Ġlisten": 2140, - "Ġkey": 2141, - "Ġmarket": 2142, - "ĠÑĩÑĤо": 2143, - "ization": 2144, - "Ġvideos": 2145, - "Ġguy": 2146, - "Ġfig": 2147, - "Ġstra": 2148, - "ĠPl": 2149, - "ully": 2150, - "amos": 2151, - "Ġmention": 2152, - "Ġsong": 2153, - "Ġintern": 2154, - "ral": 2155, - "urs": 2156, - "Ġhon": 2157, - "Ġvalue": 2158, - "Ġbar": 2159, - "cle": 2160, - "ож": 2161, - "Äĩ": 2162, - "ľë": 2163, - "Ġzu": 2164, - "им": 2165, - "ä½ł": 2166, - "Ġsingle": 2167, - "Ġauch": 2168, - "cuss": 2169, - "Ġgets": 2170, - "Ġsometimes": 2171, - "å¾": 2172, - "amb": 2173, - "mm": 2174, - "cing": 2175, - "Ġperfect": 2176, - "ĠBl": 2177, - "outh": 2178, - "ìł": 2179, - "Ġsci": 2180, - "par": 2181, - "Ġred": 2182, - "Ġpost": 2183, - "Ġmot": 2184, - "Ġelect": 2185, - "ĠEu": 2186, - "itive": 2187, - "ĠSome": 2188, - "Ġdescri": 2189, - "Ġcurrent": 2190, - "és": 2191, - "Ġtre": 2192, - "ĠEn": 2193, - "Ġmit": 2194, - "EN": 2195, - "Īë": 2196, - "ium": 2197, - "Ġheard": 2198, - "Ġsimple": 2199, - "lar": 2200, - "Ġeverybody": 2201, - "ilar": 2202, - "Ġneeds": 2203, - "Ġdiffic": 2204, - "ĠGood": 2205, - "ument": 2206, - "cent": 2207, - "Ġoper": 2208, - "аÑĤÑĮ": 2209, - "ety": 2210, - "Ġblack": 2211, - "Ġgiven": 2212, - "ones": 2213, - "Ġwel": 2214, - "éĢ": 2215, - "ĠìķĦ": 2216, - "Ġ30": 2217, - "AT": 2218, - "Ġstat": 2219, - "ouch": 2220, - "ĠMr": 2221, - "аÑĢ": 2222, - "Ġsho": 2223, - "Ġcond": 2224, - "×Ķ": 2225, - "my": 2226, - "Ġchildren": 2227, - "Ġeu": 2228, - "ед": 2229, - "ìķĦ": 2230, - "tern": 2231, - "Ġuh": 2232, - "Ġhar": 2233, - "Ġprom": 2234, - "Ġpull": 2235, - "rew": 2236, - "Ġcompany": 2237, - "Ġbeautiful": 2238, - "ustom": 2239, - "íķĺ": 2240, - "ки": 2241, - "Ġstre": 2242, - "Ġamazing": 2243, - "ries": 2244, - "Ġsuccess": 2245, - "Ġmach": 2246, - "not": 2247, - "Ġdiscuss": 2248, - "Ġnat": 2249, - "¦¬": 2250, - "Ġune": 2251, - "Ġdifficult": 2252, - "Ġris": 2253, - "ν": 2254, - "Ġcamp": 2255, - "Ġbuy": 2256, - "ä¸Ģ": 2257, - "Ġmag": 2258, - "po": 2259, - "ĠYour": 2260, - "Ġbehind": 2261, - "ica": 2262, - "ın": 2263, - "ĠOK": 2264, - "Ġlang": 2265, - "Ġwomen": 2266, - "Ġenv": 2267, - "Ġrece": 2268, - "Ġchannel": 2269, - "ially": 2270, - "ule": 2271, - "Ġ12": 2272, - "thers": 2273, - "Ġbott": 2274, - "Ġreport": 2275, - "ently": 2276, - "fully": 2277, - "The": 2278, - "Ġsent": 2279, - "Ġevent": 2280, - "Ġenergy": 2281, - "lt": 2282, - "Ġwords": 2283, - "arr": 2284, - "dle": 2285, - "Ġahead": 2286, - "ards": 2287, - "ر": 2288, - "äºĨ": 2289, - "Ġtool": 2290, - "conom": 2291, - "еÑģ": 2292, - "Ġexactly": 2293, - "Ġfavor": 2294, - "Ġlow": 2295, - "Ġproper": 2296, - "ĠìŀĪ": 2297, - "Ġ!": 2298, - "Ġrelations": 2299, - "Ġmas": 2300, - "Ġkids": 2301, - "Ġentire": 2302, - "ude": 2303, - "Ùħ": 2304, - "ĠWhere": 2305, - "Ġones": 2306, - "Ġcity": 2307, - "olut": 2308, - "Ġsix": 2309, - "ability": 2310, - "ör": 2311, - "ili": 2312, - "ĠEs": 2313, - "Ġhappens": 2314, - "ains": 2315, - "Ġmodel": 2316, - "Ġpict": 2317, - "Ġespecially": 2318, - "Ġ100": 2319, - "kt": 2320, - "Ġsoon": 2321, - "by": 2322, - "rodu": 2323, - "Ġann": 2324, - "Ġsubscri": 
2325, - "ĠQu": 2326, - "Ġavail": 2327, - "iment": 2328, - "Ġvoc": 2329, - "ka": 2330, - "Ġ200": 2331, - "aper": 2332, - "ĠInd": 2333, - "Ġì§": 2334, - "hor": 2335, - "į°": 2336, - "jor": 2337, - "ил": 2338, - "Ġsqu": 2339, - "AU": 2340, - "arning": 2341, - "Ġг": 2342, - "IS": 2343, - "Ġл": 2344, - "ей": 2345, - "yes": 2346, - "åħ": 2347, - "ĠÐĴ": 2348, - "Ġorig": 2349, - "ого": 2350, - "Ġasked": 2351, - "ilt": 2352, - "ог": 2353, - "Ġcontinue": 2354, - "Ġìĺ": 2355, - "ram": 2356, - "Ġothers": 2357, - "ES": 2358, - "ohn": 2359, - "Ġlay": 2360, - "Ġbased": 2361, - "Ġpu": 2362, - "Ġappe": 2363, - "Ġlim": 2364, - "Ġprop": 2365, - "Ģë": 2366, - "min": 2367, - "Ġhot": 2368, - "ĠLa": 2369, - "Ġfast": 2370, - "Ġprotect": 2371, - "Ġamount": 2372, - "Ġaqu": 2373, - "Ġfund": 2374, - "Ġcustom": 2375, - "Ġcult": 2376, - "Ġhands": 2377, - "Ġhaven": 2378, - "Ġaud": 2379, - "Ġoutside": 2380, - "ĠAfter": 2381, - "aps": 2382, - "Ġanim": 2383, - "ploy": 2384, - "Ġhat": 2385, - "ĠFirst": 2386, - "Ġtreat": 2387, - "Ġep": 2388, - "Ġmater": 2389, - "Ġbuilding": 2390, - "Ġë°": 2391, - "åIJ": 2392, - "ìĦľ": 2393, - "za": 2394, - "ughter": 2395, - "ĠPe": 2396, - "ney": 2397, - "eter": 2398, - "atic": 2399, - "Ġeduc": 2400, - "기": 2401, - "Ġmov": 2402, - "ĵ¤": 2403, - "ama": 2404, - "ration": 2405, - "Ġsn": 2406, - "ÙĪ": 2407, - "Ġsum": 2408, - "Ġphot": 2409, - "ĠÐĿ": 2410, - "Ġ.": 2411, - "æľī": 2412, - "Ġfinish": 2413, - "itting": 2414, - "å®": 2415, - "Ġlarge": 2416, - "Ġìĸ": 2417, - "Ġwhite": 2418, - "ara": 2419, - "Ġmais": 2420, - "ĠHi": 2421, - "Ġdam": 2422, - "ĠاÙĦ": 2423, - "Ġbox": 2424, - "ĠHello": 2425, - "Ġsle": 2426, - "Ġopt": 2427, - "ried": 2428, - "¥¼": 2429, - "Ġactiv": 2430, - "Ġnão": 2431, - "ĠCom": 2432, - "Ġplaying": 2433, - "Th": 2434, - "Ġavailable": 2435, - "Ġport": 2436, - "åĪ": 2437, - "ĠAh": 2438, - "Ġlas": 2439, - "Ġearly": 2440, - "Ġwonder": 2441, - "±°": 2442, - "Ġ18": 2443, - "cul": 2444, - "Ġfunction": 2445, - "Ġmorning": 2446, - "lle": 2447, - "ients": 2448, - "ux": 2449, - "Ġcir": 2450, - "itions": 2451, - "Ġdeep": 2452, - "Ġpolit": 2453, - "yor": 2454, - "mp": 2455, - "aking": 2456, - "Įë": 2457, - "ĠMan": 2458, - "Ġmillion": 2459, - "Ġ/": 2460, - "Ġindivid": 2461, - "Ġpan": 2462, - "Ġgovernment": 2463, - "Ġwrite": 2464, - "ĠTod": 2465, - "ament": 2466, - "ĠÏ": 2467, - "Ġwind": 2468, - "ĠEng": 2469, - "chen": 2470, - "Wh": 2471, - "ìľ": 2472, - "Ġident": 2473, - "ãģ§": 2474, - "vent": 2475, - "urch": 2476, - "Ġhy": 2477, - "Ġya": 2478, - "Ġtrad": 2479, - "Ġrelationship": 2480, - "ú": 2481, - "Ġdou": 2482, - "OR": 2483, - "Ġswe": 2484, - "Ġneg": 2485, - "ination": 2486, - "Ġtext": 2487, - "ipp": 2488, - "Ġfine": 2489, - "ás": 2490, - "ĠDr": 2491, - "ĠCome": 2492, - "Ġmonths": 2493, - ",\"": 2494, - "ени": 2495, - "Ġhours": 2496, - "Ġpod": 2497, - "irt": 2498, - "Ġinvol": 2499, - "Ġcollect": 2500, - "Ġauf": 2501, - "Ġpa": 2502, - "Ġhistory": 2503, - "mb": 2504, - "ify": 2505, - "Ġ?": 2506, - "Ġbelow": 2507, - "asure": 2508, - "aby": 2509, - "Ġlangu": 2510, - "Ġant": 2511, - "Ġcomb": 2512, - "ato": 2513, - "Ġexist": 2514, - "Ġëĭ": 2515, - "Ġtakes": 2516, - "Ġcharacter": 2517, - "aff": 2518, - "Ġfield": 2519, - "Ġeconom": 2520, - "ief": 2521, - "Ġpiece": 2522, - "åľ": 2523, - "Ġreach": 2524, - "Ġê²": 2525, - "ony": 2526, - "Ġmaterial": 2527, - "Ġdig": 2528, - "Ġphys": 2529, - "Ġimpro": 2530, - "Ġsimilar": 2531, - "IC": 2532, - "Ġnet": 2533, - "yn": 2534, - "Ġposition": 2535, - "ÃŁ": 2536, - "Ġbene": 2537, - "read": 2538, - "Ġlearning": 2539, - "ume": 2540, - "Ġclean": 2541, - "ÑĤоÑĢ": 
2542, - "Ġcook": 2543, - "Ġseems": 2544, - "Ġol": 2545, - "ĠUS": 2546, - "ĠJes": 2547, - "Ġà®": 2548, - "ential": 2549, - "iversity": 2550, - "acy": 2551, - "ĠÑı": 2552, - "olutely": 2553, - "rect": 2554, - "ĠPlease": 2555, - "Ġrepres": 2556, - "Ġtouch": 2557, - "men": 2558, - "Ġа": 2559, - "ión": 2560, - "ĠThanks": 2561, - "Ġang": 2562, - "Ġmajor": 2563, - "Ġitself": 2564, - "ills": 2565, - "\",": 2566, - "ians": 2567, - "Ġscreen": 2568, - "Ġhor": 2569, - "Ġknown": 2570, - "Ġenviron": 2571, - "Ġfinal": 2572, - "Ġfigure": 2573, - "ĠTw": 2574, - "Ġeyes": 2575, - "Ġimag": 2576, - "Ġseeing": 2577, - "Ġhair": 2578, - "rem": 2579, - "Ġapplic": 2580, - "ends": 2581, - "put": 2582, - "Ġnews": 2583, - "Ġcompletely": 2584, - "ughs": 2585, - "Ġknew": 2586, - "ified": 2587, - "ĠJe": 2588, - "ĠDid": 2589, - "Ġsituation": 2590, - "Ġflo": 2591, - "ms": 2592, - "Ġphone": 2593, - "Ġball": 2594, - "do": 2595, - "Ġparent": 2596, - "Ġsorry": 2597, - "ury": 2598, - "ин": 2599, - "ips": 2600, - "ад": 2601, - "Ġinstead": 2602, - "Ġhuge": 2603, - "Ġtu": 2604, - "Ġãģ": 2605, - "ĠGr": 2606, - "Ġdetail": 2607, - "ĠÐŁ": 2608, - "Ġindividual": 2609, - "Ġfire": 2610, - "Ġclos": 2611, - "Ġwer": 2612, - "une": 2613, - "Ġrunning": 2614, - "Ġconvers": 2615, - "Ġrecomm": 2616, - "Ġcomo": 2617, - "Ġsomebody": 2618, - "ĠJohn": 2619, - "ĠìĿ´": 2620, - "ĠOur": 2621, - "ples": 2622, - "ĠPh": 2623, - "Ġanal": 2624, - "Ġ50": 2625, - "Ġoffer": 2626, - "Ġ<": 2627, - "itional": 2628, - "gest": 2629, - "Ġvous": 2630, - "let": 2631, - "icy": 2632, - "Ġfeeling": 2633, - "LE": 2634, - "ros": 2635, - "Ġthird": 2636, - "ок": 2637, - "Ġseries": 2638, - "ĠAny": 2639, - "ised": 2640, - "old": 2641, - "Ġdraw": 2642, - "Ġservice": 2643, - "Ġcannot": 2644, - "bal": 2645, - "ãģĨ": 2646, - "Ġliving": 2647, - "ım": 2648, - "Ġdifference": 2649, - "Ġopportunity": 2650, - "Ġnear": 2651, - "orth": 2652, - "ken": 2653, - "Ġlocal": 2654, - "ت": 2655, - "ĠCon": 2656, - "Ġobject": 2657, - "Ġdass": 2658, - "ãģĻ": 2659, - "IJ×": 2660, - "Ġquickly": 2661, - "raph": 2662, - "Ġissues": 2663, - "éĢĻ": 2664, - "ĠAmerican": 2665, - "Ġprep": 2666, - "ences": 2667, - "Ġprofess": 2668, - "lling": 2669, - "of": 2670, - "Ġfoot": 2671, - "bre": 2672, - "Ġusually": 2673, - "Ġgeneral": 2674, - "da": 2675, - "ances": 2676, - "Ġdest": 2677, - "Ġocc": 2678, - "Ġmembers": 2679, - "Ġdans": 2680, - "Ġequal": 2681, - "zt": 2682, - "Ġbecom": 2683, - "Ġmoving": 2684, - "Ġspecific": 2685, - "ÃŃa": 2686, - "Ġfur": 2687, - "Ġnecess": 2688, - "Ġcommon": 2689, - "Ġattack": 2690, - "ĠÑįÑĤо": 2691, - "ĠToday": 2692, - "Ġuns": 2693, - "ĠGu": 2694, - "iod": 2695, - "Ġaccount": 2696, - "Ġgrand": 2697, - "Ġself": 2698, - "ĠEl": 2699, - "Ġtast": 2700, - "Ġcontent": 2701, - "Ġcu": 2702, - "Ħë": 2703, - "ĠMaybe": 2704, - "ĠJesus": 2705, - "ores": 2706, - "port": 2707, - "©´": 2708, - "Ġgives": 2709, - "Ġnormal": 2710, - "ÑĢÑĥ": 2711, - "Ġimpact": 2712, - "är": 2713, - "Ġdies": 2714, - "Ġlab": 2715, - "sh": 2716, - "ios": 2717, - "ĠPres": 2718, - "ĠUnd": 2719, - "ĠOf": 2720, - "Ġfinally": 2721, - "Ġdoll": 2722, - "Ġvocê": 2723, - "ply": 2724, - "ĠAg": 2725, - "Ġtaken": 2726, - "Ġground": 2727, - "fort": 2728, - "Ġgave": 2729, - "ĠInst": 2730, - "Ġlost": 2731, - "Ġworked": 2732, - "Ġliter": 2733, - "Ġissue": 2734, - "Ġindust": 2735, - "Ġreturn": 2736, - "Ġhappening": 2737, - "Ġwants": 2738, - "ив": 2739, - "Ġproblems": 2740, - "ĠCar": 2741, - "Ŀ¼": 2742, - "ĠAlso": 2743, - "Ġsize": 2744, - "Ġobviously": 2745, - "ĠSu": 2746, - "ĠSc": 2747, - "Ġrecommend": 2748, - "ources": 2749, - "astic": 2750, 
- "....": 2751, - "Ġmi": 2752, - "lier": 2753, - "ĠEven": 2754, - "cia": 2755, - "Ġhur": 2756, - "va": 2757, - "Ġmass": 2758, - "Ġwouldn": 2759, - "unt": 2760, - "cks": 2761, - "Ġfelt": 2762, - "osp": 2763, - "light": 2764, - "олÑĮ": 2765, - "nie": 2766, - "Ġbottom": 2767, - "ĠбÑĭ": 2768, - "ored": 2769, - "ison": 2770, - "Ġgrad": 2771, - "Ġuma": 2772, - "Ġva": 2773, - "ĠìĤ": 2774, - "ression": 2775, - "ulation": 2776, - "ID": 2777, - "idence": 2778, - "Ġbur": 2779, - "Ġgone": 2780, - "lu": 2781, - "ìĸ´ì": 2782, - "Ġredu": 2783, - "Ġja": 2784, - "ìĿĺ": 2785, - "ita": 2786, - "Ġsoft": 2787, - "Ġça": 2788, - "ico": 2789, - "eral": 2790, - "ñ": 2791, - "af": 2792, - "Ġpoints": 2793, - "gu": 2794, - "Ġdé": 2795, - "apt": 2796, - "ax": 2797, - "ĠAlright": 2798, - "Ġcamera": 2799, - "Ġach": 2800, - "Ġпо": 2801, - "Ġsever": 2802, - "50": 2803, - "Ġsie": 2804, - "Ïģ": 2805, - "Ġmal": 2806, - "Ġcomput": 2807, - "Ġmiddle": 2808, - "Ġcouldn": 2809, - "ming": 2810, - "Ġìĭ": 2811, - "ĠHis": 2812, - "Ġgames": 2813, - "Ġintrodu": 2814, - "Ġcell": 2815, - "por": 2816, - "Ġsleep": 2817, - "Ġë³": 2818, - "iding": 2819, - "Ġou": 2820, - "Ġdeg": 2821, - "Ġdrink": 2822, - "Ġenvironment": 2823, - "ĠUnited": 2824, - "Ġtalked": 2825, - "Ġchoose": 2826, - "Ġjour": 2827, - "ege": 2828, - "ĠMin": 2829, - "Ġinte": 2830, - "Ġrather": 2831, - "Ġoffic": 2832, - "ка": 2833, - "aching": 2834, - "Ġmentioned": 2835, - "Ġfill": 2836, - "Ġtrack": 2837, - "Ġnie": 2838, - "Ġut": 2839, - "ĠвÑĭ": 2840, - "ibility": 2841, - "Ġvac": 2842, - "Ġrad": 2843, - "Ġpack": 2844, - "Ġsend": 2845, - "ĠDas": 2846, - "ĠAb": 2847, - "Ġengine": 2848, - "ãģĹ": 2849, - "Ġcompet": 2850, - "ô": 2851, - "ĠвÑģ": 2852, - "Ġdoor": 2853, - "Ġlonger": 2854, - "å°į": 2855, - "Ġlanguage": 2856, - "Ġextra": 2857, - "play": 2858, - "Ġwebs": 2859, - "umb": 2860, - "room": 2861, - "çľ": 2862, - "Ġbeginning": 2863, - "Ġrefer": 2864, - "AM": 2865, - "nen": 2866, - "igher": 2867, - "face": 2868, - "erc": 2869, - "Ġforget": 2870, - "Ġcomment": 2871, - "ек": 2872, - "лÑı": 2873, - "ror": 2874, - "że": 2875, - "ĠGe": 2876, - "Ġdark": 2877, - "Ġanyone": 2878, - "ante": 2879, - "ges": 2880, - "ìĬµ": 2881, - "Ñij": 2882, - "bed": 2883, - "je": 2884, - "ructure": 2885, - "Ġprim": 2886, - "ida": 2887, - "è¦": 2888, - "ãģ¾": 2889, - "Ġmix": 2890, - "Ġstarting": 2891, - "ĠìĿ´ë": 2892, - "Ġprovide": 2893, - "action": 2894, - "Ġmother": 2895, - "Ġperiod": 2896, - "Ġstick": 2897, - "ĠYouT": 2898, - "Ġtechnology": 2899, - "ê¹": 2900, - "Ġbed": 2901, - "Ġgiving": 2902, - "Ġexplain": 2903, - "zen": 2904, - "imate": 2905, - "Ġrepresent": 2906, - "load": 2907, - "ĠHowever": 2908, - "Ġlives": 2909, - "uth": 2910, - "irit": 2911, - "ogn": 2912, - "Ġlik": 2913, - "Ġrespons": 2914, - "Ġpriv": 2915, - "Ġtom": 2916, - "ção": 2917, - "iam": 2918, - "Ġexcited": 2919, - "Ġcard": 2920, - "ground": 2921, - "Ġ×Ķ": 2922, - "Ġsens": 2923, - "Ġteach": 2924, - "ido": 2925, - "hod": 2926, - "Ġepis": 2927, - "Ġwelcome": 2928, - "Ġwall": 2929, - "ä¹": 2930, - "Ġchance": 2931, - "hen": 2932, - "ĠС": 2933, - "ĠÄij": 2934, - "Ġsimply": 2935, - "ĠÑĤак": 2936, - "ring": 2937, - "ja": 2938, - "book": 2939, - "Ġseveral": 2940, - "ste": 2941, - "Ġcreated": 2942, - "ĠоÑĤ": 2943, - "Ġpush": 2944, - "==": 2945, - "Ġhigher": 2946, - "uf": 2947, - "ource": 2948, - "oke": 2949, - "Ġonline": 2950, - "Ġrele": 2951, - "Ġton": 2952, - "ensive": 2953, - "Ġfavorite": 2954, - "Ñĥд": 2955, - "Ġlooked": 2956, - "Ġvon": 2957, - "âĢĶ": 2958, - "Ġfür": 2959, - "Ġbutton": 2960, - "Ġbill": 2961, - "Ġchanges": 2962, - "!\"": 
2963, - "Ġslow": 2964, - "ables": 2965, - "Ġdeath": 2966, - "ands": 2967, - "ateg": 2968, - "Ġthemselves": 2969, - "ãģ£": 2970, - "Ġcop": 2971, - "ãģ®": 2972, - "Ġpersonal": 2973, - "ughing": 2974, - "Ġ11": 2975, - "gar": 2976, - "ades": 2977, - "Ġneeded": 2978, - "Ġstudy": 2979, - "aged": 2980, - "ÑģÑĤв": 2981, - "ino": 2982, - "Ġdisc": 2983, - "ki": 2984, - "Ġaddress": 2985, - "ר": 2986, - "itten": 2987, - "esome": 2988, - "Ġж": 2989, - "¤ë": 2990, - "ura": 2991, - "Ġmu": 2992, - "Ġcontinu": 2993, - "for": 2994, - "Ġmatch": 2995, - "ãģ¦": 2996, - "Ġstraight": 2997, - "IJë": 2998, - "ners": 2999, - "Ġdog": 3000, - "Ġdeb": 3001, - "ĠCO": 3002, - "Ġos": 3003, - "ged": 3004, - "came": 3005, - "Ġcorrect": 3006, - "ette": 3007, - "ĠSee": 3008, - "Ġincluding": 3009, - "ĠEuro": 3010, - "ester": 3011, - "Ġjump": 3012, - "ĠWhich": 3013, - "Ġкак": 3014, - "son": 3015, - "ya": 3016, - "ING": 3017, - "Ġeine": 3018, - "osh": 3019, - "ency": 3020, - "Ġmedia": 3021, - "Ġsubscribe": 3022, - "éĤ": 3023, - "Ġprin": 3024, - "Ġhab": 3025, - "ĠPer": 3026, - "ĠWas": 3027, - "Ġpage": 3028, - "itor": 3029, - "Ġtowards": 3030, - "Ġtried": 3031, - "enge": 3032, - "artment": 3033, - "Ġvari": 3034, - "Ġpaper": 3035, - "Ġpicture": 3036, - "Ġversion": 3037, - "Ġbrought": 3038, - "ware": 3039, - "ĠStates": 3040, - "Ġsich": 3041, - "ledge": 3042, - "Ġpercent": 3043, - "Ġgod": 3044, - "ec": 3045, - "ĠComm": 3046, - "Ġdecided": 3047, - "Ġselect": 3048, - "íķľ": 3049, - ").": 3050, - "urity": 3051, - "Ġfurther": 3052, - "Ġcomments": 3053, - "lement": 3054, - "Ġdream": 3055, - "Ġcenter": 3056, - "mi": 3057, - "Ġcas": 3058, - "Ġwoman": 3059, - "Ġroad": 3060, - "Ġfail": 3061, - "Ġbecame": 3062, - "lus": 3063, - "ilities": 3064, - "ãģ¯": 3065, - "ĠCo": 3066, - "Ġmanage": 3067, - "Ġrecogn": 3068, - "Ġaction": 3069, - "Ġbenef": 3070, - "Ġearlier": 3071, - "׾": 3072, - "Ġspeed": 3073, - "Ġment": 3074, - "Ġsoci": 3075, - "Ġshoot": 3076, - "ui": 3077, - "Ġä": 3078, - "Ġapply": 3079, - "vo": 3080, - "xim": 3081, - "Ġcause": 3082, - "Ġsurpr": 3083, - "Ġhaben": 3084, - "DI": 3085, - "Ġfather": 3086, - "ĠNext": 3087, - "ĠYouTube": 3088, - "Ġcode": 3089, - "Ġrole": 3090, - "gress": 3091, - "Ġgreen": 3092, - "ett": 3093, - "Ġbuilt": 3094, - "Ġflow": 3095, - "Ġbase": 3096, - "Ġtraining": 3097, - "Ġround": 3098, - "ĠWill": 3099, - "Ġpath": 3100, - "ĠRo": 3101, - "Ġinterested": 3102, - "ìĸ´": 3103, - "Ġrespect": 3104, - "Ġchanged": 3105, - "ission": 3106, - "Ġstudent": 3107, - "ograph": 3108, - "Ġapproach": 3109, - "Ġshows": 3110, - "å°±": 3111, - "Ġtar": 3112, - "Ġcrit": 3113, - "Ġglo": 3114, - "ìĬµëĭĪëĭ¤": 3115, - "Ġdead": 3116, - "ĠPresident": 3117, - "Ġthous": 3118, - "Ġbal": 3119, - "ster": 3120, - "ex": 3121, - "Ġabsolutely": 3122, - "Ġmic": 3123, - "Ġpractice": 3124, - "Ġquality": 3125, - "Ġlower": 3126, - "ogle": 3127, - "Ġsepar": 3128, - "ball": 3129, - "medi": 3130, - "Ġreview": 3131, - "ĠApp": 3132, - "Ġok": 3133, - "âĢĭ": 3134, - "Ġexperien": 3135, - "Ġconcern": 3136, - "entially": 3137, - "more": 3138, - "ĠJo": 3139, - "apan": 3140, - "ĠIch": 3141, - "istic": 3142, - "Ġfair": 3143, - "Ġwebsite": 3144, - "ires": 3145, - "ĠBy": 3146, - "Ġtravel": 3147, - "Ġrisk": 3148, - "Ġmir": 3149, - "Ġboard": 3150, - "Ġsen": 3151, - "Ġparents": 3152, - "ĠWow": 3153, - "Ġfeed": 3154, - "Ġsave": 3155, - "Ġserious": 3156, - "Ġinit": 3157, - "EL": 3158, - "undred": 3159, - "AS": 3160, - "Ġvan": 3161, - "orrow": 3162, - "Ġworth": 3163, - "Ġsearch": 3164, - "Ġ16": 3165, - "Ġparts": 3166, - "ÑģÑĤÑĮ": 3167, - "Ġcompan": 3168, - "Ġmovie": 3169, - 
"Ġmethod": 3170, - "Ġill": 3171, - "Ġwish": 3172, - "dy": 3173, - "Ġitem": 3174, - "Ġminus": 3175, - "anger": 3176, - "Ġvoice": 3177, - "Ġskin": 3178, - "Ġareas": 3179, - "Ġeight": 3180, - "Ġobs": 3181, - "Ġ,": 3182, - "ай": 3183, - "Ġoil": 3184, - "Ġcy": 3185, - "Ġbaby": 3186, - "sy": 3187, - "Ġemploy": 3188, - "ĠKe": 3189, - "Ġplaces": 3190, - "Ġfix": 3191, - "Ġestá": 3192, - "ãģ¨": 3193, - "ived": 3194, - "Ġlots": 3195, - "Ġseason": 3196, - "unk": 3197, - "alt": 3198, - "Ġtable": 3199, - "ĠТ": 3200, - "â": 3201, - "Ġattention": 3202, - "ãģª": 3203, - "ĠHer": 3204, - "Ġage": 3205, - "Ġpra": 3206, - "back": 3207, - "cil": 3208, - "Ġnetwork": 3209, - "rit": 3210, - "Ġdoc": 3211, - "Ġaren": 3212, - "igen": 3213, - "ĠëĦ": 3214, - "د": 3215, - "ender": 3216, - "Ġtotal": 3217, - "Ġprice": 3218, - "Ġcrazy": 3219, - "ìļ": 3220, - "iqu": 3221, - "though": 3222, - "You": 3223, - "Ùĩ": 3224, - "ãĤĵ": 3225, - "Ïħ": 3226, - "Ġsat": 3227, - "Ġbi": 3228, - "ĠDie": 3229, - "Ġsha": 3230, - "Ġthanks": 3231, - "uh": 3232, - "Ġstage": 3233, - "аж": 3234, - "ĠFl": 3235, - "Ġleav": 3236, - "Ġboy": 3237, - "Ġaf": 3238, - "ön": 3239, - "ĠGet": 3240, - "Ġaccept": 3241, - "Ġenter": 3242, - "Ġtur": 3243, - "ĠsiÄĻ": 3244, - "Ġhonest": 3245, - "ãĢĮ": 3246, - "Ġsam": 3247, - "Ġrepl": 3248, - "ging": 3249, - "Ġdevelopment": 3250, - "ĠAct": 3251, - "ora": 3252, - "ãĢį": 3253, - "ä¾": 3254, - "Ġknows": 3255, - "Ġimage": 3256, - "ĠLord": 3257, - "иÑĤÑĮ": 3258, - "Ġweeks": 3259, - "Ġsex": 3260, - "Ķë": 3261, - "Ġhundred": 3262, - "Ġsounds": 3263, - "Ġlearned": 3264, - "Ġbud": 3265, - "ĠÑģÑĤ": 3266, - "Ġincred": 3267, - "âĻ": 3268, - "Ġnos": 3269, - "Ġdrop": 3270, - "Ġben": 3271, - "ĠÐĺ": 3272, - "Ġsafe": 3273, - "ata": 3274, - "Ġfuck": 3275, - "soci": 3276, - "Ġdan": 3277, - "Ġcross": 3278, - "10": 3279, - "mo": 3280, - "vert": 3281, - "Ġ17": 3282, - "zie": 3283, - "åķ": 3284, - "Ġdom": 3285, - "ĠBo": 3286, - "Ġsetting": 3287, - "Ġinvolved": 3288, - "arily": 3289, - "Ġsind": 3290, - "Ġsus": 3291, - "Ġworry": 3292, - "eth": 3293, - "ê¹Į": 3294, - "Ġsun": 3295, - "Ġhier": 3296, - "Ġcertainly": 3297, - "oul": 3298, - "orts": 3299, - "ĠEr": 3300, - "ĠUm": 3301, - "Ġcaus": 3302, - "Ġnatural": 3303, - "Ġü": 3304, - "Ġcry": 3305, - "ĠSec": 3306, - "Ġsom": 3307, - "æ²": 3308, - "Ġeducation": 3309, - "аеÑĤ": 3310, - "Ġmultip": 3311, - "Ġalone": 3312, - "Ġeye": 3313, - "Ġrate": 3314, - "ĠEurope": 3315, - "è¿": 3316, - "mon": 3317, - "Ġfit": 3318, - "izing": 3319, - "pped": 3320, - "Ġpressure": 3321, - "the": 3322, - "иÑģ": 3323, - "ites": 3324, - "ĠAf": 3325, - "reci": 3326, - "attle": 3327, - "Ġservices": 3328, - "ĠGoogle": 3329, - "éģ": 3330, - "Ġcases": 3331, - "Ġdrive": 3332, - "Ġchalleng": 3333, - "uz": 3334, - "ĠMo": 3335, - "ìľ¼ë": 3336, - "val": 3337, - "åĢĭ": 3338, - "Ġfol": 3339, - "Ġì¢": 3340, - "ffic": 3341, - "Ġra": 3342, - "Ġsin": 3343, - "Ġblue": 3344, - "Ġaffect": 3345, - "Ġmis": 3346, - "Ġshot": 3347, - "Ġоб": 3348, - "asing": 3349, - "Ġsignific": 3350, - "ĠChe": 3351, - "Ġê³": 3352, - "Ġpositive": 3353, - "ì£": 3354, - "Ġwie": 3355, - "Ġ40": 3356, - "ording": 3357, - "ĠFrom": 3358, - "êµ": 3359, - "Ġbrand": 3360, - "Ġtrust": 3361, - "Ġple": 3362, - "Ġcommunic": 3363, - "Ġweight": 3364, - "Ġasking": 3365, - "Ġtax": 3366, - "ĠJapan": 3367, - "ãģŁ": 3368, - "Ġíķĺ": 3369, - "ops": 3370, - "ÏĤ": 3371, - "Ġputting": 3372, - "Ġroll": 3373, - "ĠAmerica": 3374, - "reg": 3375, - "ŀ×": 3376, - "atures": 3377, - "ension": 3378, - "ĠSomet": 3379, - "Ġoriginal": 3380, - "ping": 3381, - "ĠÅŁ": 3382, - "Ġproducts": 3383, - "ãĥ¼": 
3384, - "Ġcontact": 3385, - "olution": 3386, - "Ġgoal": 3387, - "Ġpow": 3388, - "Ġperformance": 3389, - "Ġblood": 3390, - "ators": 3391, - "ĠMich": 3392, - "Ġtemper": 3393, - "ĠDan": 3394, - "Ġsugg": 3395, - "ÑĤи": 3396, - "Ġimm": 3397, - "Ġoffice": 3398, - "Ġarri": 3399, - "Ġcomfort": 3400, - "ĠÐĶ": 3401, - "Ġsuggest": 3402, - "Ġplat": 3403, - "Ĥĺ": 3404, - "19": 3405, - "Ġom": 3406, - "Ġseven": 3407, - "ĠCent": 3408, - "ille": 3409, - "Ġconcept": 3410, - "Ġbag": 3411, - "ün": 3412, - "ively": 3413, - "Ġdiv": 3414, - "mos": 3415, - "æī": 3416, - "Ġfeels": 3417, - "Ġir": 3418, - "akes": 3419, - "ley": 3420, - "Ġparticip": 3421, - "ĠÐļ": 3422, - "fl": 3423, - "just": 3424, - "Ġsil": 3425, - "ĠPa": 3426, - "AL": 3427, - "Ġgotta": 3428, - "Ġfan": 3429, - "Ġchallenge": 3430, - "Ġcompanies": 3431, - "ĠPeople": 3432, - "": 12331, - "Ġheroes": 12332, - "ĠBoston": 12333, - "Ġdependent": 12334, - "Ġmotivation": 12335, - "flix": 12336, - "Ġseam": 12337, - "кие": 12338, - "Ġdrain": 12339, - "oded": 12340, - "Ġguilty": 12341, - "ĠJenn": 12342, - "ingen": 12343, - "Ġgranted": 12344, - "ĠKelly": 12345, - "ĠSav": 12346, - "ĠUncle": 12347, - "ĠHonestly": 12348, - "ELI": 12349, - "Ġnavigate": 12350, - "Ġblessed": 12351, - "core": 12352, - "Ġearning": 12353, - "Ġsignals": 12354, - "Ġdisk": 12355, - "ials": 12356, - "Ġages": 12357, - "æħ": 12358, - "Ġparticle": 12359, - "ĠÑĩеÑĢ": 12360, - "Ġcann": 12361, - "Ġtier": 12362, - "Ġstatements": 12363, - "ê³łìļĶ": 12364, - "ĠëķĮ문ìĹIJ": 12365, - "ĠCho": 12366, - "Ġpolar": 12367, - "anç": 12368, - "ĠKenn": 12369, - "ĠNi": 12370, - "ĠFight": 12371, - "organ": 12372, - "éķ": 12373, - "ĠCha": 12374, - "ĠSÃŃ": 12375, - "ãĥª": 12376, - "Ġslic": 12377, - "Ġcertific": 12378, - "Ġtemplate": 12379, - "ĠFederal": 12380, - "Ġconsideration": 12381, - "Ġexplo": 12382, - "ĠMain": 12383, - "ĠNE": 12384, - "Ġalongside": 12385, - "Ġdressed": 12386, - "ĠPoint": 12387, - "Ġenvironments": 12388, - "Ġpróxim": 12389, - "Ġdaar": 12390, - "Ġprompt": 12391, - "Ġpursue": 12392, - "Ġentertainment": 12393, - "Ġthroat": 12394, - "Ġproblema": 12395, - "Ġmart": 12396, - "ì¼": 12397, - "Ġprovider": 12398, - "ØĮ": 12399, - "Ġ×Ĺ": 12400, - "inte": 12401, - "making": 12402, - "Ġstroke": 12403, - "Ġtissue": 12404, - "Un": 12405, - "Ġprecious": 12406, - "ĠArts": 12407, - "inking": 12408, - "ĠÐŀн": 12409, - "ĠиÑģ": 12410, - "nah": 12411, - "ĠÐķÑģли": 12412, - "Ġcorners": 12413, - "Ġtricky": 12414, - "inch": 12415, - "lijk": 12416, - "Ġpressing": 12417, - "level": 12418, - "ANG": 12419, - "Ġradiation": 12420, - "ìĦł": 12421, - "Ġconfront": 12422, - "Ġvet": 12423, - "Ġrepresentative": 12424, - "Ġpropag": 12425, - "Ġcrap": 12426, - "ĠDec": 12427, - "Ġramp": 12428, - "епеÑĢÑĮ": 12429, - "ués": 12430, - "essen": 12431, - "cription": 12432, - "Ġbills": 12433, - "ĠMatthew": 12434, - "Ġanime": 12435, - "ất": 12436, - "Ġlowest": 12437, - "has": 12438, - "screen": 12439, - "ograp": 12440, - "ало": 12441, - "inton": 12442, - "ĠJah": 12443, - "èĢħ": 12444, - "itÃł": 12445, - "Ġkay": 12446, - "Ġrotation": 12447, - "ĠWere": 12448, - "abei": 12449, - "Ġtrials": 12450, - "Ġlever": 12451, - "ighty": 12452, - "Ġspoon": 12453, - "Ġhunt": 12454, - "cling": 12455, - "Ġdism": 12456, - "ĠболÑĮÑĪ": 12457, - "Ġassault": 12458, - "Ġíĺķ": 12459, - "Ġweekly": 12460, - "Ġmismo": 12461, - "Ġgenetic": 12462, - "ulpt": 12463, - "ĠStudent": 12464, - "Ġrealistic": 12465, - "Ġauthentic": 12466, - "æīĵ": 12467, - "asta": 12468, - "Ġarrested": 12469, - "Ġguidelines": 12470, - "Ġ׾×IJ": 12471, - "Ġдав": 12472, - "ĠComing": 12473, - "für": 
12474, - "Ġrequests": 12475, - "ĥIJ": 12476, - "Ġanalyze": 12477, - "Ġinteress": 12478, - "Ġhalt": 12479, - "ĠOper": 12480, - "onom": 12481, - "Ġduck": 12482, - "Ġwithd": 12483, - "ser": 12484, - "ĠÏĮ": 12485, - "ĠHistory": 12486, - "Ġyoutube": 12487, - "ãĤį": 12488, - "Ġsaber": 12489, - "walk": 12490, - "font": 12491, - "Ġoverview": 12492, - "39": 12493, - "üy": 12494, - "etti": 12495, - "Ġfrozen": 12496, - "Ġflesh": 12497, - "ÄŁi": 12498, - "ĠPM": 12499, - "ĠìĻĢ": 12500, - "é¢": 12501, - "ÑĨии": 12502, - "Ġ기ë": 12503, - "íģ¬": 12504, - "Ġprose": 12505, - "oooo": 12506, - "rates": 12507, - "WS": 12508, - "Ġautomatic": 12509, - "Ġcollecting": 12510, - "Åij": 12511, - "Ġneighbors": 12512, - "».": 12513, - "ĠExpl": 12514, - "Ġcircul": 12515, - "cover": 12516, - "weg": 12517, - "Ġsticks": 12518, - "Ġeller": 12519, - "Ġwww": 12520, - "Ġdorm": 12521, - "ĠExper": 12522, - "Ġstatistics": 12523, - "Ġemails": 12524, - "Ġgrave": 12525, - "imiz": 12526, - "HS": 12527, - "Ġuit": 12528, - ",'": 12529, - "Ġlaser": 12530, - "èī": 12531, - "ĠÑĤем": 12532, - "ÑĭÑĪ": 12533, - "ÑīÑij": 12534, - "Ġgenau": 12535, - "Ġtienen": 12536, - "Ġmeditation": 12537, - "ĠOrgan": 12538, - "Ġestimate": 12539, - "Ġ무ì": 12540, - "lets": 12541, - "ĠnÃły": 12542, - "Ġmindset": 12543, - "Ġreson": 12544, - "Ġmés": 12545, - "Ġnumerous": 12546, - "Ġvielleicht": 12547, - "ĠThird": 12548, - "uous": 12549, - "ĠDead": 12550, - "анд": 12551, - "HN": 12552, - "Ġracing": 12553, - "Ġagents": 12554, - "ĠUt": 12555, - "Ġtear": 12556, - "ĠHP": 12557, - "Ġchemistry": 12558, - "Ġsurvival": 12559, - "æĸ°": 12560, - "Ġconvinced": 12561, - "Ġ;": 12562, - "Ġregulations": 12563, - "ĠES": 12564, - "åĴĮ": 12565, - "300": 12566, - "Ġense": 12567, - "Ġìµ": 12568, - "Ġdict": 12569, - "GA": 12570, - "ĠahÃŃ": 12571, - "åĭķ": 12572, - "Ġtej": 12573, - "ĠоÑģÑĤ": 12574, - "ĠElect": 12575, - "Ġintellectual": 12576, - "Ġbias": 12577, - "Ġburden": 12578, - "çĤ¹": 12579, - "Ġìĸ´ëĸ»": 12580, - "Ġcheer": 12581, - "Ġsoph": 12582, - "Ġportfolio": 12583, - "uba": 12584, - "Ġestos": 12585, - "TV": 12586, - "For": 12587, - "Ġash": 12588, - "Ġkommer": 12589, - "Ġcollective": 12590, - "Ġwrest": 12591, - "ĠJetzt": 12592, - "ĠWat": 12593, - "reich": 12594, - "Ġprimer": 12595, - "active": 12596, - "Ġmie": 12597, - "icked": 12598, - "Ġhunting": 12599, - "Ġtestim": 12600, - "Ġcompassion": 12601, - "Ġر": 12602, - "Ġbrut": 12603, - "Ġsalad": 12604, - "обÑīе": 12605, - "Ġsolving": 12606, - "Ġfloating": 12607, - "ç·": 12608, - "Ġattractive": 12609, - "ÙĪÙĦ": 12610, - "Ġperd": 12611, - "iffer": 12612, - "Ġsculpt": 12613, - "hhh": 12614, - "ĠWeek": 12615, - "Ġenthus": 12616, - "Ġnad": 12617, - "Ġmerch": 12618, - "ĠíĻķ": 12619, - "Ġmile": 12620, - "好äºĨ": 12621, - "Ġθ": 12622, - "ĠëĤĺë": 12623, - "éĩį": 12624, - "38": 12625, - "Ġchains": 12626, - "ĠAlmost": 12627, - "Ġtickets": 12628, - "rin": 12629, - "ĠCC": 12630, - "Ġdistributed": 12631, - "abetes": 12632, - "Ġtemperatures": 12633, - "Ġgained": 12634, - "Ġflexibility": 12635, - "Ġscreaming": 12636, - "Ġabroad": 12637, - "uno": 12638, - "Ġentrepreneurs": 12639, - "ĠNetwork": 12640, - "ĠCanadian": 12641, - "Ġprev": 12642, - "Ġsö": 12643, - "ĠÑĤебÑı": 12644, - "ĠPoke": 12645, - "ĠPod": 12646, - "ĠTurkey": 12647, - "çı¾åľ¨": 12648, - "Ġabstract": 12649, - "Ġsnake": 12650, - "ĠAmy": 12651, - "ĠëĬIJëĤĮ": 12652, - "Ġbrave": 12653, - "ĠìŀĪìĸ´ìļĶ": 12654, - "ĠKal": 12655, - "Ġ2007": 12656, - "ário": 12657, - "Ġmarked": 12658, - "gines": 12659, - "Ġalloc": 12660, - "ONG": 12661, - "Ġscientist": 12662, - "Ġesca": 12663, - "Ġracism": 12664, 
- "×ij×": 12665, - "ĠSams": 12666, - "ĠPenn": 12667, - "Ġloads": 12668, - "Ġந": 12669, - "über": 12670, - "Me": 12671, - "ixò": 12672, - "Ġperò": 12673, - "anne": 12674, - "Ġexpressed": 12675, - "меÑĢ": 12676, - "Ġmoet": 12677, - "Ġreturning": 12678, - "nia": 12679, - "Ġexpon": 12680, - "Pro": 12681, - "Ġloyal": 12682, - "ML": 12683, - "Ġlamp": 12684, - "Ġshy": 12685, - "Ġcomposition": 12686, - "ĠLy": 12687, - "Ġmagnetic": 12688, - "Ġpremier": 12689, - "Ġmeasured": 12690, - "Ġsummary": 12691, - "Ġattacked": 12692, - "Ġfinishing": 12693, - "ÐĹ": 12694, - "ç¥": 12695, - "Ġsits": 12696, - "Ġhydrogen": 12697, - "Ġmai": 12698, - "ĠDeutsch": 12699, - "ası": 12700, - "Ġobtain": 12701, - "vie": 12702, - "Ġsoit": 12703, - "Ġë°Ķ": 12704, - "Ġlane": 12705, - "Ġconsegu": 12706, - "во": 12707, - "Ġease": 12708, - "akin": 12709, - "ĠFa": 12710, - "Ġuntuk": 12711, - "Ġburst": 12712, - "Ġcum": 12713, - "alım": 12714, - "úblic": 12715, - "idi": 12716, - "ĠRoyal": 12717, - "ĠKon": 12718, - "Ġcommonly": 12719, - "Ġremoving": 12720, - "Ġjur": 12721, - "ilib": 12722, - "Ġanch": 12723, - "íĸī": 12724, - "ượ": 12725, - "ĠÐľÑĭ": 12726, - "ĠAnth": 12727, - "ĠSÃ¥": 12728, - "Ġinterrupt": 12729, - "Ġstere": 12730, - "ĠOS": 12731, - "onym": 12732, - "tery": 12733, - "ĠMaria": 12734, - "ê²ĥ": 12735, - "Ġexploring": 12736, - "Ġtransparent": 12737, - "Ġfate": 12738, - "ĠJung": 12739, - "Ġgrup": 12740, - "Ġdarker": 12741, - "ĠDoug": 12742, - "Ġmane": 12743, - "æĶ¾": 12744, - "ại": 12745, - "dri": 12746, - "look": 12747, - "ĠDesign": 12748, - "Ġtutaj": 12749, - "Ġhorizontal": 12750, - "reon": 12751, - "orte": 12752, - "ĠCorrect": 12753, - "ĠSteven": 12754, - "Ġvine": 12755, - "02": 12756, - "iÄĩ": 12757, - "Ġsiempre": 12758, - "ĠKey": 12759, - "åĥı": 12760, - "ĠGames": 12761, - "Ġnaar": 12762, - "Ġshocked": 12763, - "elve": 12764, - "ĠRose": 12765, - "ìĭ¬": 12766, - "Ġstopping": 12767, - "ohl": 12768, - "ĠMix": 12769, - "Ġsuffered": 12770, - "Ġsigma": 12771, - "Ġweakness": 12772, - "ĠOw": 12773, - "ีà¹Ī": 12774, - "IF": 12775, - "Ġà®ħ": 12776, - "aded": 12777, - "ĠNetflix": 12778, - "anes": 12779, - "Ġremained": 12780, - "iry": 12781, - "Ġrip": 12782, - "ellt": 12783, - "Ġsilent": 12784, - "Ġproven": 12785, - "Ġtoxic": 12786, - "Ġalumin": 12787, - "Ġmultipl": 12788, - "aland": 12789, - "Ġ34": 12790, - "06": 12791, - "ĠBru": 12792, - "Ġìłķë§IJ": 12793, - "Just": 12794, - "boy": 12795, - "Ġshoe": 12796, - "Ġcreature": 12797, - "Ġheaded": 12798, - "ĠоÑĤк": 12799, - "æ±": 12800, - "Ġessence": 12801, - "Ġremarkable": 12802, - "Ġnúmer": 12803, - "Ġdrew": 12804, - "Ġpuzzle": 12805, - "ĠLibrary": 12806, - "ĠFu": 12807, - "ashes": 12808, - "kk": 12809, - "ĠIst": 12810, - "¦°": 12811, - "ĠBry": 12812, - "Ġceremony": 12813, - "Ġà®İ": 12814, - "Ġcri": 12815, - "equ": 12816, - "ãĤ¢": 12817, - "Ġprize": 12818, - "Ġdimensions": 12819, - "ogram": 12820, - "Ġleather": 12821, - "Ġpopulations": 12822, - "uum": 12823, - "Ġvegan": 12824, - "Ñıд": 12825, - "Ġcómo": 12826, - "åĦ": 12827, - "Ġstrip": 12828, - "å£": 12829, - "Ġvacation": 12830, - "ħķ": 12831, - "Ġmeals": 12832, - "ilipp": 12833, - "Ġents": 12834, - "aram": 12835, - "richt": 12836, - "Ġgrain": 12837, - "ĠSpain": 12838, - "Ġcheek": 12839, - "ĠAff": 12840, - "ION": 12841, - "ĠBring": 12842, - "Ġ38": 12843, - "ielen": 12844, - "ulu": 12845, - "ĠболÑĮÑĪе": 12846, - "Ġannouncement": 12847, - "ĠÑĤÑĥÑĤ": 12848, - "ĠProphet": 12849, - "ardo": 12850, - "37": 12851, - "Ġwoke": 12852, - "Ġtranslation": 12853, - "ĠNOT": 12854, - "ĠCL": 12855, - "ĠdÃ¼ÅŁ": 12856, - "ÑĨÑĸ": 12857, - "acer": 
12858, - "ĠLoc": 12859, - "Ġperception": 12860, - "NO": 12861, - "Ġdiesen": 12862, - "Look": 12863, - "heart": 12864, - "aved": 12865, - "Ġboundary": 12866, - "Ġflows": 12867, - "Ñijм": 12868, - "Ġarguments": 12869, - "Ġelections": 12870, - "ıs": 12871, - "Ġheck": 12872, - "Ġsuitable": 12873, - "Ġfiber": 12874, - "ĠStra": 12875, - "xy": 12876, - "ĠHum": 12877, - "Ġmonthly": 12878, - "uper": 12879, - "Ġgolf": 12880, - "Ġlately": 12881, - "ĠGard": 12882, - "ĠRen": 12883, - "ĠAst": 12884, - "ĠFant": 12885, - "аÑģÑģ": 12886, - "Ġobser": 12887, - "ë¡ľ": 12888, - "Ġeasiest": 12889, - "įĶë": 12890, - "Ġwebsites": 12891, - "pol": 12892, - "Ġcocon": 12893, - "Ġà®ĩ": 12894, - "ĠVeg": 12895, - "Ġwalks": 12896, - "Ġintro": 12897, - "Ġdirected": 12898, - "ĠAnna": 12899, - "Ġëĵ¤ìĸ´": 12900, - "ĠEastern": 12901, - "ĠSaint": 12902, - "ĠBow": 12903, - "Ġroast": 12904, - "ĠURL": 12905, - "Ġjeden": 12906, - "uras": 12907, - "aja": 12908, - "Ġsemi": 12909, - "Ġrapidly": 12910, - "Ġtargets": 12911, - "ĠControl": 12912, - "Ġbah": 12913, - "Ġreflection": 12914, - "Ġcreativity": 12915, - "holders": 12916, - "Ġìĺ¬ë": 12917, - "Ġamongst": 12918, - "Ġfeeding": 12919, - "ÑįÑĤомÑĥ": 12920, - "Ġвиде": 12921, - "Ġë§Įëĵ¤": 12922, - "ĠSmart": 12923, - "Ġreliable": 12924, - "Ġvezes": 12925, - "Ġר": 12926, - "chuckles": 12927, - "azione": 12928, - "ĠWilliams": 12929, - "Ġaç": 12930, - "Ġslee": 12931, - "еÑī": 12932, - "Ġtimeline": 12933, - "Ġthorough": 12934, - "á»į": 12935, - "ĠOt": 12936, - "ạn": 12937, - "Ġimagination": 12938, - "Ġmechanics": 12939, - "rist": 12940, - "Ġclaimed": 12941, - "ÏĦη": 12942, - "ête": 12943, - "ĠHurry": 12944, - "ĠiPad": 12945, - "Ġconstru": 12946, - "ĠCla": 12947, - "ĠAls": 12948, - "ä¼ļ": 12949, - "utz": 12950, - "Ġcultures": 12951, - "Ġìĸ´ëĸ»ê²Į": 12952, - "Ġbelongs": 12953, - "Ġyer": 12954, - "ĠDoesn": 12955, - "Ġgeomet": 12956, - "Ġbid": 12957, - "Ġfoam": 12958, - "Ġhob": 12959, - "ĠBritain": 12960, - "Ġsubstance": 12961, - "Ġanniversary": 12962, - "ĠëĦĪ": 12963, - "Ġnoted": 12964, - "Ġgovernor": 12965, - "Ġstocks": 12966, - "31": 12967, - "Ġdiye": 12968, - "ìĬ¤ë": 12969, - "Ġreb": 12970, - "zel": 12971, - "Ġmultiply": 12972, - "Ġoperator": 12973, - "Ħ¤ìļĶ": 12974, - "Ġwaters": 12975, - "Ġdär": 12976, - "Ġunser": 12977, - "ĠElizabeth": 12978, - "é«ĺ": 12979, - "Ġincreasingly": 12980, - "ĠGro": 12981, - "Ġengines": 12982, - "irs": 12983, - "Ø«": 12984, - "Ġtreasure": 12985, - "PC": 12986, - "inction": 12987, - "iri": 12988, - "Ġaccum": 12989, - "Ġvariation": 12990, - "Ġpom": 12991, - "Ġtitles": 12992, - "ĠFest": 12993, - "ós": 12994, - "Ġelder": 12995, - "nym": 12996, - "run": 12997, - "Ñıв": 12998, - "Ġinnovative": 12999, - "Ġnombre": 13000, - "Ġcoinc": 13001, - "Ġfranch": 13002, - "Ġentonces": 13003, - "Ġnichts": 13004, - "Ġexclusive": 13005, - "ĠCheers": 13006, - "ĠBi": 13007, - "uje": 13008, - "æŃ¡": 13009, - "Ġpok": 13010, - "ĠPrem": 13011, - "Ġrocket": 13012, - "ELIPE": 13013, - "Ġhospitals": 13014, - "rium": 13015, - "Ġjuste": 13016, - "Ġhammer": 13017, - "Ġquantum": 13018, - "Ġresponses": 13019, - "lly": 13020, - "endi": 13021, - "Ġactively": 13022, - "Ġfridge": 13023, - "iate": 13024, - "long": 13025, - "Ġquem": 13026, - "Ġdeaths": 13027, - "Ġsuperior": 13028, - "cken": 13029, - "ìĿ´ìĹIJ": 13030, - "ktop": 13031, - "Ġgathered": 13032, - "£¨": 13033, - "Ġdazu": 13034, - "Ġrecipes": 13035, - "Ġbuzz": 13036, - "cen": 13037, - "Ġanytime": 13038, - "onsense": 13039, - "Ġcircles": 13040, - "Ġsolved": 13041, - "Ġìĭł": 13042, - "Ġcoronavirus": 13043, - "ĠLuke": 13044, - "Ġbubb": 13045, - 
"Ġcontempor": 13046, - "rzy": 13047, - "ĠJane": 13048, - "Ġдом": 13049, - "Ġscrews": 13050, - "Ġhybrid": 13051, - "Ġcasual": 13052, - "Ġselbst": 13053, - "being": 13054, - "ĠÄIJ": 13055, - "ĠColumb": 13056, - "ĠÑħоÑĩ": 13057, - "Ġbucket": 13058, - "Ġevaluate": 13059, - "Ġidol": 13060, - "Ġreputation": 13061, - "ĠìĨĮë": 13062, - "ÙĪر": 13063, - "Ġhecho": 13064, - "Ġpoem": 13065, - "Ġsubjects": 13066, - "plant": 13067, - "ĠBeh": 13068, - "ĠSpeaking": 13069, - "Ġbatteries": 13070, - "Ġfollowers": 13071, - "öl": 13072, - "Ġgently": 13073, - "Ġsixt": 13074, - "Ġparameter": 13075, - "Ġikke": 13076, - "ĠTour": 13077, - "ĠDJ": 13078, - "otte": 13079, - "ĠJahren": 13080, - "Ġpreparation": 13081, - "ĠдÑĥм": 13082, - "Ġ800": 13083, - "cop": 13084, - "iking": 13085, - "Ġ문": 13086, - "ĠнÑĥ": 13087, - "ĠлеÑĤ": 13088, - "åIJĮ": 13089, - "ĠIde": 13090, - "Ġì¡°ê¸Ī": 13091, - "Ġlaughter": 13092, - "Ġmolecules": 13093, - "ĠRest": 13094, - "Ġobserved": 13095, - "dzie": 13096, - "Ġadvertising": 13097, - "erto": 13098, - "Ġmoins": 13099, - "ĠMIT": 13100, - "Ġexcit": 13101, - "Ġtum": 13102, - "Ġtyl": 13103, - "Ġinvested": 13104, - "Ġpharm": 13105, - "Ġunexpected": 13106, - "Ġphi": 13107, - "otype": 13108, - "weise": 13109, - "Ġgeç": 13110, - "jourd": 13111, - "Ġhorses": 13112, - "nÄħ": 13113, - "=\"": 13114, - "ĠSM": 13115, - "Ġfib": 13116, - "Ġclips": 13117, - "çķ¶": 13118, - "å¦Ĥæŀľ": 13119, - "Ġregime": 13120, - "Ġrotate": 13121, - "rou": 13122, - "nik": 13123, - "Ġarmor": 13124, - "ðŁĺ": 13125, - "еÑĢа": 13126, - "度": 13127, - "ĠOch": 13128, - "Ġrichtig": 13129, - "üzel": 13130, - "aneously": 13131, - "mek": 13132, - "éĮ¯": 13133, - "ĠXiao": 13134, - "Ġexisted": 13135, - "worth": 13136, - "ãģ£ãģ¨": 13137, - "Ġnaught": 13138, - "ĠheiÃŁt": 13139, - "ĠBal": 13140, - "Ġresid": 13141, - "ivot": 13142, - "omatic": 13143, - "Ġhired": 13144, - "Ġgradually": 13145, - "Ġonions": 13146, - "Ġcompat": 13147, - "Ġintim": 13148, - "Ġjew": 13149, - "Ġcontribution": 13150, - "ĠIre": 13151, - "acji": 13152, - "Ġslice": 13153, - "Ġimmun": 13154, - "ĠRus": 13155, - "Ġgrows": 13156, - "ĠSimilarly": 13157, - "Ġhardest": 13158, - "Ġstruck": 13159, - "Ġmeasurement": 13160, - "...]": 13161, - "they": 13162, - "ĠìłĢë": 13163, - "Ġsneak": 13164, - "Ġapplies": 13165, - "Ġнем": 13166, - "æĵ": 13167, - "×ijר": 13168, - "ĠЧÑĤо": 13169, - "Ġoutro": 13170, - "Ġinnocent": 13171, - "Ġmog": 13172, - "ĠSamsung": 13173, - "Ġmercy": 13174, - "Ġhandling": 13175, - "Ġintervention": 13176, - "idays": 13177, - "got": 13178, - "Ġcurric": 13179, - "Ġboundaries": 13180, - "Ġconfusing": 13181, - "Ŀ¼ëĬĶ": 13182, - "æĩ": 13183, - "Ġstitches": 13184, - "ÃŃvel": 13185, - "Ġtunnel": 13186, - "itä": 13187, - "Ġgost": 13188, - "imy": 13189, - "Ġczas": 13190, - "Ġmé": 13191, - "Ġcatal": 13192, - "ĠSimon": 13193, - "ĠLIAM": 13194, - "mic": 13195, - "ĠФ": 13196, - "Ġeyel": 13197, - "isas": 13198, - "ĠCPU": 13199, - "ĠDou": 13200, - "Ġnäch": 13201, - "Ġinfinity": 13202, - "Ġrif": 13203, - "ĠPeace": 13204, - "ĠCu": 13205, - "Ġminimal": 13206, - "Ġlistened": 13207, - "Ġpole": 13208, - "halb": 13209, - "Ġloaded": 13210, - "Ġsteady": 13211, - "ĠBesides": 13212, - "êm": 13213, - "Ġlap": 13214, - "Ġcoop": 13215, - "Ġfriendship": 13216, - "world": 13217, - "Ġgeh": 13218, - "Ġtylko": 13219, - "ĠLaura": 13220, - "Ġsurrounded": 13221, - "ĠEvent": 13222, - "Ġchap": 13223, - "ĠWonder": 13224, - "break": 13225, - "Ġdrove": 13226, - "Ġbroader": 13227, - "Ġchi": 13228, - "Fi": 13229, - "Ġgehen": 13230, - "Ġwestern": 13231, - "Ġintelligent": 13232, - "Ġpersist": 13233, - 
"Ġfounded": 13234, - "ãģĵãģ¨": 13235, - "Ġhistoric": 13236, - "ĠfrÃ¥": 13237, - "cksÃ¥": 13238, - "Ġhandy": 13239, - "Ġsymp": 13240, - "Ġrows": 13241, - "Ġnutri": 13242, - "bur": 13243, - "ĠLeon": 13244, - "Ġsistema": 13245, - "Ġextensive": 13246, - "ĠÑĥв": 13247, - "íı": 13248, - "Ġnights": 13249, - "Ġcác": 13250, - "Ġcounting": 13251, - "ĠMust": 13252, - "allow": 13253, - "еÑģÑģ": 13254, - "Mom": 13255, - "Ġнадо": 13256, - "Ġbarrel": 13257, - "ãĥŀ": 13258, - "ARD": 13259, - "Ġinstallation": 13260, - "Ġinsect": 13261, - "Ġëħ¸ë": 13262, - "ujÄħ": 13263, - "ĠÄiji": 13264, - "Ġpacked": 13265, - "Ġfiction": 13266, - "Now": 13267, - "ĠYay": 13268, - "Ġpert": 13269, - "rons": 13270, - "unde": 13271, - "aches": 13272, - "Ġstyles": 13273, - "Ġaprès": 13274, - "oku": 13275, - "ĠVice": 13276, - "ınız": 13277, - "comm": 13278, - "Ġassigned": 13279, - "Ġinteractions": 13280, - "Ġacab": 13281, - "FELIPE": 13282, - "Ġrescue": 13283, - "Ġindustries": 13284, - "ĠAndy": 13285, - "Ġpraise": 13286, - "Ġflame": 13287, - "Ġsnack": 13288, - "íĤ": 13289, - "çģ": 13290, - "Ġswo": 13291, - "render": 13292, - "Ġboards": 13293, - "ĠÑĤом": 13294, - "enne": 13295, - "Ġpasta": 13296, - "Ġdevil": 13297, - "ĠFel": 13298, - "Ġhatte": 13299, - "Ġcolleg": 13300, - "eh": 13301, - "ì»": 13302, - "ãģĵãģ®": 13303, - "Ġproductive": 13304, - "forward": 13305, - "ип": 13306, - "Ġsmartphone": 13307, - "Ġinvis": 13308, - "Ġbum": 13309, - "Ġwhoa": 13310, - "ìŀĦ": 13311, - "ĠocksÃ¥": 13312, - "ĠLang": 13313, - "ĠSyria": 13314, - "Ġsesi": 13315, - "ία": 13316, - "Ġapproval": 13317, - "48": 13318, - "Ġодин": 13319, - "Ġëĸ": 13320, - "ĠHarr": 13321, - "ĠAdminist": 13322, - "Ġפ": 13323, - "ĠDean": 13324, - "fi": 13325, - "Ġcitizen": 13326, - "Ġshark": 13327, - "05": 13328, - "Ġboil": 13329, - "Ġindicate": 13330, - "å¡": 13331, - "Are": 13332, - "Ġlayout": 13333, - "Ġrefr": 13334, - "ĠPacific": 13335, - "AAAA": 13336, - "ĠAustralian": 13337, - "gression": 13338, - "Voice": 13339, - "алÑģÑı": 13340, - "Ġshelter": 13341, - "To": 13342, - "aupt": 13343, - "Ġevaluation": 13344, - "apor": 13345, - "Ġcurrency": 13346, - "Ġмного": 13347, - "igos": 13348, - "ãģ°": 13349, - "Ġoct": 13350, - "Ġroyal": 13351, - "è³": 13352, - "asil": 13353, - "ĠChildren": 13354, - "Ġrien": 13355, - "Ġëĵľë": 13356, - "Ġbarrier": 13357, - "Ġejemplo": 13358, - "Ġek": 13359, - "ND": 13360, - "esp": 13361, - "ена": 13362, - "Ġpic": 13363, - "Ġkiller": 13364, - "Ġintegrate": 13365, - "Ġfewer": 13366, - "Ġdisabilities": 13367, - "Ġ....": 13368, - "Ġtriangle": 13369, - "Ġfees": 13370, - "Ġwidely": 13371, - "emi": 13372, - "Ġoverwhelming": 13373, - "Ġzomb": 13374, - "Ġbere": 13375, - "Ġhood": 13376, - "ĠAye": 13377, - "ĠHarvard": 13378, - "ev": 13379, - "ĠÏĦοÏħ": 13380, - "Ġcups": 13381, - "ĠAuch": 13382, - "zona": 13383, - "Ġ1990": 13384, - "ĠweiÃŁ": 13385, - "Ġcrunch": 13386, - "æ¥": 13387, - "Ġзав": 13388, - "Ġmeasuring": 13389, - "Ġstations": 13390, - "ĠStephen": 13391, - "Ġshortly": 13392, - "Ġsigning": 13393, - "Ġcomedy": 13394, - "omo": 13395, - "Ġsuggestions": 13396, - "Ġsignature": 13397, - "ĠпÑĢив": 13398, - "Ġdisorder": 13399, - "aska": 13400, - "Ġworlds": 13401, - "Ġprecisely": 13402, - "norm": 13403, - "rav": 13404, - "ĠCivil": 13405, - "Inter": 13406, - "ĠCertain": 13407, - "Ġinjured": 13408, - "Ġsuggests": 13409, - "ĠGolden": 13410, - "Ġcyber": 13411, - "ĠØ´": 13412, - "Ġtemporary": 13413, - "Ġcooper": 13414, - "Ġvoted": 13415, - "Ġought": 13416, - "ấy": 13417, - "xual": 13418, - "Ġpanels": 13419, - "Ġ95": 13420, - "Ġhandsome": 13421, - "ĠпÑĢов": 13422, - 
"Ġpermit": 13423, - "Ġkein": 13424, - "Ġbadly": 13425, - "Ġnotifications": 13426, - "iza": 13427, - "ĠNotice": 13428, - "Ġinclusive": 13429, - "Ġanswering": 13430, - "ĠíĹ": 13431, - "uld": 13432, - "íħĮ": 13433, - "Ġnowadays": 13434, - "Ġ37": 13435, - "Ġbolt": 13436, - "Ġstatic": 13437, - "ĠHop": 13438, - "Ġavant": 13439, - "ajo": 13440, - "Ġ맼ìŀĪ": 13441, - "Ġfifty": 13442, - "ĠFinal": 13443, - "Ġscores": 13444, - "ĠTap": 13445, - "Ġcyl": 13446, - "Ġconvince": 13447, - "Ġanyways": 13448, - "oda": 13449, - "Ġìķ¼": 13450, - "Ġserves": 13451, - "ĠÑĤакой": 13452, - "ĠZoom": 13453, - "Ġsavings": 13454, - "ulo": 13455, - "Ġsouthern": 13456, - "viewer": 13457, - "Ġhoje": 13458, - "Ġseja": 13459, - "Ġrepresenting": 13460, - "Īëįĺ": 13461, - "lik": 13462, - "ĠSomebody": 13463, - "Ġbeast": 13464, - "Ġsticking": 13465, - "Ġinsist": 13466, - "Ġtalented": 13467, - "Ġexplaining": 13468, - "Ġattorney": 13469, - "éĥ¨": 13470, - "Ġstairs": 13471, - "ĠDog": 13472, - "íĭ": 13473, - "Ġcig": 13474, - "Ġshaped": 13475, - "Ġsons": 13476, - "Ïģι": 13477, - "utt": 13478, - "ĠìĶ": 13479, - "Ġparad": 13480, - "ìĿ¸ëį°": 13481, - "Ġhorn": 13482, - "ĠJour": 13483, - "anno": 13484, - "Ġworldwide": 13485, - "åĬĽ": 13486, - "Ġparticipation": 13487, - "¦Ħ": 13488, - "Ġmów": 13489, - "Ġburned": 13490, - "Ġwriters": 13491, - "allah": 13492, - "ĠFund": 13493, - "Ġclever": 13494, - "ĠLeute": 13495, - "bin": 13496, - "Ġbeating": 13497, - "foot": 13498, - "ĠìĽIJ": 13499, - "ĠStudio": 13500, - "Ġvag": 13501, - "bey": 13502, - "rze": 13503, - "Ġopposition": 13504, - "Ġжиз": 13505, - "who": 13506, - "Ġê±´": 13507, - "Ġtrace": 13508, - "ĠденÑĮ": 13509, - "Ġepid": 13510, - "Ġgesch": 13511, - "ĠNar": 13512, - "ĠBE": 13513, - "Ñĥй": 13514, - "ĠSign": 13515, - "edly": 13516, - "Ġclay": 13517, - "Ġinstantly": 13518, - "Ġgathering": 13519, - "ĠGalaxy": 13520, - "Ġbored": 13521, - "ĠBuddh": 13522, - "cé": 13523, - "Ġmam": 13524, - "Ġslope": 13525, - "Ġëĭ¤ìĿĮ": 13526, - "Ġschön": 13527, - "Ġpir": 13528, - "gef": 13529, - "amer": 13530, - "Ġhö": 13531, - "Ġcolleague": 13532, - "Ġpresents": 13533, - "adium": 13534, - "Ġவ": 13535, - "Ġfalar": 13536, - "beep": 13537, - "Ġdried": 13538, - "isms": 13539, - "Ġrope": 13540, - "Ġworkshop": 13541, - "Ġestud": 13542, - "Ġbands": 13543, - "Ġthemes": 13544, - "åħ¬": 13545, - "ÙĬر": 13546, - "åIJİ": 13547, - "Ġreminder": 13548, - "ÑĤÑĥ": 13549, - "ĠBh": 13550, - "Ġcoconut": 13551, - "ĠÑģÑĤо": 13552, - "ĠChannel": 13553, - "Ġimmigration": 13554, - "äs": 13555, - ".....": 13556, - "主": 13557, - "çĻ½": 13558, - "stop": 13559, - "ĠкаÑĢ": 13560, - "Ġcoins": 13561, - "ĠÑĩаÑģ": 13562, - "Ġdestruction": 13563, - "lined": 13564, - "Ġbarriers": 13565, - "antine": 13566, - "Ġprinted": 13567, - "Ġcongratulations": 13568, - "ĠHeart": 13569, - "Ġinqu": 13570, - "tha": 13571, - "Ġhardly": 13572, - "ĠAven": 13573, - "Ġtinha": 13574, - "ĠSony": 13575, - "ĠNF": 13576, - "Ġgraduates": 13577, - "Ġsqueeze": 13578, - "eremy": 13579, - "ÏĦι": 13580, - "Ġepic": 13581, - "ĠJu": 13582, - "Ġolm": 13583, - "ĠLaughter": 13584, - "Ġbeliefs": 13585, - "ĠCru": 13586, - "ĠTrue": 13587, - "ĠSoul": 13588, - "oween": 13589, - "Ġromantic": 13590, - "Ġзв": 13591, - "Ġanos": 13592, - "ĠYup": 13593, - "éĺ¿": 13594, - "dim": 13595, - "Ġinfer": 13596, - "Ġзам": 13597, - "Ġsoc": 13598, - "uka": 13599, - "Ġprecise": 13600, - "Ġdropping": 13601, - "Ġclue": 13602, - "Ġerrors": 13603, - "charge": 13604, - "ĠPu": 13605, - "ometer": 13606, - "Ġlambda": 13607, - "acional": 13608, - "ĠDong": 13609, - "Ġchamber": 13610, - "Ġthankful": 13611, - "ĠNu": 
13612, - "ĠHawai": 13613, - "Ġinfo": 13614, - "Ġactivate": 13615, - "ĠQual": 13616, - "Ġqued": 13617, - "ÑĥлÑĮ": 13618, - "Ġcloth": 13619, - "åĸľ": 13620, - "Ġwichtig": 13621, - "55": 13622, - "Ġotra": 13623, - "ographer": 13624, - "Ġcurios": 13625, - "Ġ1980": 13626, - "Ġempres": 13627, - "dess": 13628, - "eur": 13629, - "Ġcluster": 13630, - "arter": 13631, - "obile": 13632, - "ĠYan": 13633, - "ĠAdv": 13634, - "Ġdiscipline": 13635, - "ĠìłķëıĦ": 13636, - "ĠPlace": 13637, - "ĠSelect": 13638, - "TE": 13639, - "ĠбÑĭла": 13640, - "Ġwhis": 13641, - "Ġbay": 13642, - "ĠDor": 13643, - "encing": 13644, - "Ġrepet": 13645, - "Ġficar": 13646, - "pad": 13647, - "Ġfog": 13648, - "uyor": 13649, - "Ġsnap": 13650, - "ibt": 13651, - "Ġsobie": 13652, - "Ġappointment": 13653, - "ĠRy": 13654, - "Ġceiling": 13655, - "ourse": 13656, - "Ġwrites": 13657, - "ĠAfghanistan": 13658, - "Ġmos": 13659, - "aze": 13660, - "Ġpenal": 13661, - "Ġcrystal": 13662, - "ICE": 13663, - "ê°IJ": 13664, - "éŁ": 13665, - "ĠTesla": 13666, - "Ġtheories": 13667, - "Ġappeal": 13668, - "Ġnewspaper": 13669, - "Ġcookies": 13670, - "æ©": 13671, - "ĠاÙĦÙĦ": 13672, - "Ġmaj": 13673, - "ĠGetting": 13674, - "kommen": 13675, - "ĠHeaven": 13676, - "ells": 13677, - "Ġdivine": 13678, - "Ä«": 13679, - "Ġakt": 13680, - "Ġhopes": 13681, - "ĠChen": 13682, - "wegen": 13683, - "***": 13684, - "ĠFrage": 13685, - "Ġни": 13686, - "ู": 13687, - "minister": 13688, - "nesota": 13689, - "which": 13690, - "Ġexplicit": 13691, - "Ġverdad": 13692, - "Ġgraduated": 13693, - "ĠPhilipp": 13694, - "QL": 13695, - "ĠMI": 13696, - "Ġdevot": 13697, - "Ġcure": 13698, - "Ġclosest": 13699, - "ĠÃĦ": 13700, - "Ġsexy": 13701, - "ãģĽ": 13702, - "ĠDeath": 13703, - "oko": 13704, - "ugu": 13705, - "ĠAnne": 13706, - "itarian": 13707, - "esa": 13708, - "егод": 13709, - "ĠDur": 13710, - "Ġ000": 13711, - "zeit": 13712, - "Ġtournament": 13713, - "Ġmelhor": 13714, - "ส": 13715, - "Ġindu": 13716, - "Ġflaw": 13717, - "Ġwars": 13718, - "ĠMind": 13719, - "ĠIron": 13720, - "ÑĤак": 13721, - "ĠVR": 13722, - "Ġsiz": 13723, - "ĠSouthern": 13724, - "Ġê·¸ëŁ¬ë": 13725, - "Ġawak": 13726, - "Ġìķŀ": 13727, - "Ġcube": 13728, - "believable": 13729, - "ifall": 13730, - "dis": 13731, - "Ġabandoned": 13732, - "mind": 13733, - "Ġparl": 13734, - "Ġclassical": 13735, - "èĭ": 13736, - "á»Ļt": 13737, - "ĠAuto": 13738, - "ĠBor": 13739, - "ç©": 13740, - "400": 13741, - "ĠSociety": 13742, - "Ġsubtle": 13743, - "Ġmissions": 13744, - "Ġremembered": 13745, - "ĠEither": 13746, - "Ġdafür": 13747, - "ORD": 13748, - "Ġintensity": 13749, - "ESIN": 13750, - "ĠCup": 13751, - "Ġrarely": 13752, - "Ġtoys": 13753, - "ĠCharlie": 13754, - "ợ": 13755, - "Ġglaube": 13756, - "Ġrounds": 13757, - "TIN": 13758, - "Ġcapability": 13759, - "Ġderivative": 13760, - "Ġreferring": 13761, - "ĠdÃ¥": 13762, - "ĠTALI": 13763, - "Ġcotton": 13764, - "Ġconfer": 13765, - "Ġcolumns": 13766, - "Ġliberal": 13767, - "Ġnunca": 13768, - "Ġμε": 13769, - "Ġindo": 13770, - "iben": 13771, - "ĠBeispiel": 13772, - "Ġê·¸ëłĩ": 13773, - "ĠÑĥÑĩ": 13774, - "Ġhoy": 13775, - "Ġfry": 13776, - "ĠScottish": 13777, - "èĬ": 13778, - "Ġciv": 13779, - "Ġconservative": 13780, - "Ġairpl": 13781, - "Ġsar": 13782, - "rus": 13783, - "Ġinvestments": 13784, - "Ġinfinite": 13785, - "Ġà®ķ": 13786, - "ĠTALIESIN": 13787, - "ĠGary": 13788, - "uell": 13789, - "Ġак": 13790, - "ĠCir": 13791, - "Ġritual": 13792, - "Ġ>>>": 13793, - "Ġtempt": 13794, - "ĠTech": 13795, - "ĠPokemon": 13796, - "Ġimprovements": 13797, - "Ġspare": 13798, - "Ġtranslate": 13799, - "Ġsonra": 13800, - "ĠFilm": 13801, - "wort": 
13802, - "Ġми": 13803, - "Ġperiods": 13804, - "Ġjealous": 13805, - "ãģĦãģĦ": 13806, - "Ġtir": 13807, - "MI": 13808, - "Ġconducted": 13809, - "ĠìķĪëħķ": 13810, - "09": 13811, - "ĠPolit": 13812, - "ĠWhereas": 13813, - "Ġmoisture": 13814, - "Ġsins": 13815, - "Ġkap": 13816, - "ĠÑįк": 13817, - "Ġbenim": 13818, - "Ġeliminate": 13819, - "Ġathletes": 13820, - "ĠManager": 13821, - "Ġfeatured": 13822, - "apore": 13823, - "äºĽ": 13824, - "Ġë°ľ": 13825, - "Ġperf": 13826, - "ĠThus": 13827, - "Ġdebut": 13828, - "обÑĢ": 13829, - "Ġseñ": 13830, - "Ġmysterious": 13831, - "words": 13832, - "Ķê°Ģ": 13833, - "Ġchecks": 13834, - "Ġvolunteer": 13835, - "Ġwashing": 13836, - "ĠMarvel": 13837, - "ĠAB": 13838, - "issors": 13839, - "!'": 13840, - "ĠFull": 13841, - "yeon": 13842, - "Ġweigh": 13843, - "ĠJOHN": 13844, - "Ġvos": 13845, - "Ġprocedures": 13846, - "Ġaddressed": 13847, - "ĠBerlin": 13848, - "puter": 13849, - "ĠBan": 13850, - "Ġmedication": 13851, - "Ġdrone": 13852, - "ĠÑĥб": 13853, - "ĠJean": 13854, - "Ġcaps": 13855, - "Ġdisappointed": 13856, - "Ġwore": 13857, - "ĠêµŃ": 13858, - "Ġorganize": 13859, - "ĠHalloween": 13860, - "Ġfantasy": 13861, - "yard": 13862, - "Ġnosotros": 13863, - "Ġjumped": 13864, - "Ġphotography": 13865, - "ĠName": 13866, - "rec": 13867, - "AB": 13868, - "Ġblessing": 13869, - "ĠShut": 13870, - "Ġbitter": 13871, - "pop": 13872, - "ãģĿãĤĮ": 13873, - "Ġdei": 13874, - "Ġfulfill": 13875, - "çIJĨ": 13876, - "Ġdengan": 13877, - "Ġbelo": 13878, - "ĠMeanwhile": 13879, - "Ġdepois": 13880, - "Ġdiabetes": 13881, - "Ġbund": 13882, - "ĠZealand": 13883, - "Ġdigest": 13884, - "Ġtires": 13885, - "Ġdod": 13886, - "agne": 13887, - "ết": 13888, - "Ġpeel": 13889, - "Ġзаб": 13890, - "Ġnodes": 13891, - "Ġtrends": 13892, - "ĠSwitch": 13893, - "ĠAward": 13894, - "ĠOrig": 13895, - "ĠHal": 13896, - "Ġestas": 13897, - "Ġ360": 13898, - "Ġsimult": 13899, - "Ġcomic": 13900, - "ĠmÃł": 13901, - "Ġbalanced": 13902, - "ĠPrincess": 13903, - "Ġkilometers": 13904, - "ứ": 13905, - "Ġpartir": 13906, - "ì¤ij": 13907, - "soft": 13908, - "ĠView": 13909, - "Ġbiological": 13910, - "inst": 13911, - "44": 13912, - "Ġmanera": 13913, - "Ġcomprehensive": 13914, - "ĠSab": 13915, - "Ġcrimes": 13916, - "yers": 13917, - "ĠCompany": 13918, - "ĠPhot": 13919, - "Ġpouco": 13920, - "iac": 13921, - "Ġbeim": 13922, - "inate": 13923, - "Ġsubsequ": 13924, - "ĠMayor": 13925, - "Ġcenturies": 13926, - "ères": 13927, - "ìŀĸìķĦìļĶ": 13928, - "Ġê·¸ëŁ¼": 13929, - "ĠFrau": 13930, - "ĠOH": 13931, - "ĠëģĿ": 13932, - "ĠNah": 13933, - "ĠSeries": 13934, - "Ġovernight": 13935, - "íĴĪ": 13936, - "ĠâĢ¢": 13937, - "Ġtrave": 13938, - "attered": 13939, - "Ġwarri": 13940, - "ĠGrund": 13941, - "ĠIndones": 13942, - "Ġscra": 13943, - "oby": 13944, - "ĠBrook": 13945, - "Ġcurs": 13946, - "Ġë¸": 13947, - "Ġexplains": 13948, - "ramatic": 13949, - "Ġparticipating": 13950, - "Ġminut": 13951, - "Ġcontracts": 13952, - "Ġgegen": 13953, - "Ġdisappeared": 13954, - "ĠSN": 13955, - "Ġrobust": 13956, - "aph": 13957, - "Ġshrim": 13958, - "Ġdevast": 13959, - "cope": 13960, - "Ġmeets": 13961, - "Ġpeaceful": 13962, - "mate": 13963, - "Ġweld": 13964, - "Ġת": 13965, - "don": 13966, - "ÑĥÑĤÑĮ": 13967, - "Ġregistered": 13968, - "ĠNik": 13969, - "jin": 13970, - "Ġcav": 13971, - "Ġecht": 13972, - "iox": 13973, - "Ġflowing": 13974, - "ноÑģÑĤи": 13975, - "Ġtoe": 13976, - "Ġentity": 13977, - "ова": 13978, - "fits": 13979, - "ĠPatrick": 13980, - "ÑĤÑĢ": 13981, - "Ġleverage": 13982, - "Ġcorrel": 13983, - "iah": 13984, - "Ġstrings": 13985, - "istinct": 13986, - "Ġgue": 13987, - "archy": 13988, - 
"Ġtengo": 13989, - "ımız": 13990, - "Ġorbit": 13991, - "为": 13992, - "ĠеÑīÑij": 13993, - "cake": 13994, - "Ġ׾×Ķ": 13995, - "ĠMinnesota": 13996, - "Ġbrake": 13997, - "owie": 13998, - "Ġcraw": 13999, - "기를": 14000, - "Ġprogramme": 14001, - "ĠÑģлÑĥÑĩ": 14002, - "åıª": 14003, - "iences": 14004, - "ĠOui": 14005, - "ĠPers": 14006, - "imiento": 14007, - "ĠInvest": 14008, - "Ġslower": 14009, - "æĻĤåĢĻ": 14010, - "ĠBeth": 14011, - "Ġnurse": 14012, - "ĠSpring": 14013, - "Sp": 14014, - "Ġunemploy": 14015, - "ди": 14016, - "Ġgenius": 14017, - "ĠAaron": 14018, - "Ġê·¸ëŁ¬": 14019, - "Ġei": 14020, - "ãģĹãĤĩ": 14021, - "Ġtanks": 14022, - "Ġaujourd": 14023, - "Ġcomplexity": 14024, - "ĠÑĢеÑĪ": 14025, - "Ġoldest": 14026, - "Ġletz": 14027, - "åħ¥": 14028, - "Ġphenomenon": 14029, - "print": 14030, - "ĠBundes": 14031, - "itat": 14032, - "ê»ĺ": 14033, - "Ġ42": 14034, - "ĠWi": 14035, - "Ġincom": 14036, - "Ġgek": 14037, - "Ġembrace": 14038, - "Ġties": 14039, - "oute": 14040, - "Ġdose": 14041, - "ĠFriends": 14042, - "ÑĭÑĤ": 14043, - "егоднÑı": 14044, - "Ġorg": 14045, - "Ħë¡ľ": 14046, - "óg": 14047, - "Ġexceed": 14048, - "Ġgods": 14049, - "Ġê±°ìĺĪìļĶ": 14050, - "Ġsociet": 14051, - "ĠUnivers": 14052, - "ität": 14053, - "Ġworden": 14054, - "Ġsmoking": 14055, - "Ġintens": 14056, - "abul": 14057, - "emia": 14058, - "èij": 14059, - "47": 14060, - "fly": 14061, - "Ġ2006": 14062, - "ĠSeriously": 14063, - "Ġprzez": 14064, - "æ¼": 14065, - "cre": 14066, - "Ġnan": 14067, - "Ġmodes": 14068, - "оваÑĤÑĮ": 14069, - "ĠHang": 14070, - "emen": 14071, - "Ġbeneficial": 14072, - "Ġvoters": 14073, - "ĠBroad": 14074, - "Ġbent": 14075, - "Wow": 14076, - "Ġmul": 14077, - "åĵ¥": 14078, - "ĠUC": 14079, - "Ġdamaged": 14080, - "ĠUkraine": 14081, - "Ġwipe": 14082, - "Ġstones": 14083, - "Ġmanagers": 14084, - "Ġrab": 14085, - "ÑģÑĤÑĢо": 14086, - "lat": 14087, - "Ġdece": 14088, - "Ġgraphic": 14089, - "Ġfoss": 14090, - "Ġdisagree": 14091, - "ĠAmen": 14092, - "Ġsecrets": 14093, - "hole": 14094, - "inkle": 14095, - "Ġfortunate": 14096, - "Ġì±": 14097, - "ìľĦ": 14098, - "èIJ¬": 14099, - "Ġhabits": 14100, - "Ġburied": 14101, - "Ġhin": 14102, - "Ġvirtually": 14103, - "olas": 14104, - "ĠRP": 14105, - "ĠTab": 14106, - "low": 14107, - "Ġsacrific": 14108, - "Ġestimated": 14109, - "oln": 14110, - "Ùĭ": 14111, - "cur": 14112, - "ĠFeel": 14113, - "Ġcastle": 14114, - "Ġuseless": 14115, - "Ġdisg": 14116, - "ĠJacob": 14117, - "Ġgaan": 14118, - "Ġupside": 14119, - "Ġparece": 14120, - "ãĥ³ãĥ": 14121, - "Ġshipping": 14122, - "ĠCR": 14123, - "Ġdisrupt": 14124, - "acter": 14125, - "UND": 14126, - "fu": 14127, - "å®Į": 14128, - "ĠPick": 14129, - "ĠCharl": 14130, - "ĠBull": 14131, - "Ġenterprise": 14132, - "Ġpunishment": 14133, - "acking": 14134, - "Ġfraction": 14135, - "Ġtablet": 14136, - "Ġchord": 14137, - "Ġsimilarly": 14138, - "åħ¶å¯¦": 14139, - "ĠToronto": 14140, - "Ġcourts": 14141, - "ÄŁl": 14142, - "eszcze": 14143, - "Ġpronoun": 14144, - "ĠSister": 14145, - "ĠMP": 14146, - "Ġgreatly": 14147, - "ĠDank": 14148, - "icop": 14149, - "Ġgarbage": 14150, - "Ġresolve": 14151, - "ĠSaf": 14152, - "ĠGun": 14153, - "Ġcompound": 14154, - "Ġë°°": 14155, - "ĠMusik": 14156, - "âĻ«": 14157, - "Ġchaos": 14158, - "ĠWhenever": 14159, - "Ġeuros": 14160, - "Ġorchest": 14161, - "Ġrefriger": 14162, - "alan": 14163, - "ื": 14164, - "ĠAmazing": 14165, - "Ġpud": 14166, - "agan": 14167, - "Ġjeszcze": 14168, - "isy": 14169, - "Ġaccuracy": 14170, - "ĠAma": 14171, - "isode": 14172, - "ëĮĢ": 14173, - "Ġinterpretation": 14174, - "ĠLiber": 14175, - "æ·": 14176, - "cam": 14177, - "Ġevolved": 14178, 
- "ĠKay": 14179, - "ÑĨÑĭ": 14180, - "Ġcreator": 14181, - "itas": 14182, - "Ġalarm": 14183, - "Ġcelebration": 14184, - "zent": 14185, - "Ġfuncion": 14186, - "Ġov": 14187, - "umbling": 14188, - "Ġ%": 14189, - "à¸Ī": 14190, - "Ġrestrictions": 14191, - "Ġнав": 14192, - "ĠKinder": 14193, - "Ġbanana": 14194, - "ÑĮÑı": 14195, - "Ġdiameter": 14196, - "Ġnorthern": 14197, - "urers": 14198, - "ĠPas": 14199, - "æĪijçļĦ": 14200, - "Ġworkforce": 14201, - "Ġjung": 14202, - "Ġguarante": 14203, - "Ġequilib": 14204, - "Ġsuite": 14205, - "Ġeuro": 14206, - "Ġdeliber": 14207, - "Ste": 14208, - "Ġdowntown": 14209, - "Ġchin": 14210, - "Ġcodes": 14211, - "edia": 14212, - "Ġsheep": 14213, - "reshold": 14214, - "wnie": 14215, - "ób": 14216, - "Ġunderlying": 14217, - "lia": 14218, - "jer": 14219, - "ÏĢÏĮ": 14220, - "çĿ": 14221, - "throp": 14222, - "Ġzap": 14223, - "Ġvacuum": 14224, - "ĠHab": 14225, - "Ġwrapped": 14226, - "ì¢": 14227, - "Ġinventory": 14228, - "ма": 14229, - "Ġcoord": 14230, - "Ġplates": 14231, - "Ġsymm": 14232, - "Te": 14233, - "ĠwÅĤaÅĽnie": 14234, - "Ġreaches": 14235, - "Ġlonely": 14236, - "Script": 14237, - "lee": 14238, - "esser": 14239, - "Ġ걸": 14240, - "ĠGesch": 14241, - "ĠMoving": 14242, - "Ġrép": 14243, - "ĠVill": 14244, - "åIJĪ": 14245, - "ĠRachel": 14246, - "Ġtemos": 14247, - "ONE": 14248, - "Ġstrain": 14249, - "Ġangel": 14250, - "ĠfÃ¥": 14251, - "Tr": 14252, - "Ġacho": 14253, - "Ġhighlights": 14254, - "ĠWer": 14255, - "ĠCarl": 14256, - "Ġblur": 14257, - "Ġregards": 14258, - "·": 14259, - "илÑģÑı": 14260, - "Ġrecre": 14261, - "ĠYani": 14262, - "UCK": 14263, - "ł¸": 14264, - "Ġelectrons": 14265, - "ĠSpiel": 14266, - "Ġved": 14267, - "Ú¾": 14268, - "Ġbeam": 14269, - "Ġidiot": 14270, - "ëĵ¤": 14271, - "наÑĩ": 14272, - "idd": 14273, - "Ġski": 14274, - "itative": 14275, - "Ġhypothes": 14276, - "ãģ§ãģĻãģŃ": 14277, - "enter": 14278, - "ĠìķĦëĭĪë": 14279, - "Ġihre": 14280, - "Ġpreview": 14281, - "angel": 14282, - "Ġdemon": 14283, - "Ġdus": 14284, - "Ġdic": 14285, - "ĠKom": 14286, - "LEY": 14287, - "...!": 14288, - "Ġsieht": 14289, - "ĠSonic": 14290, - "Ġtenho": 14291, - "anas": 14292, - "Ġdigit": 14293, - "ĠMaar": 14294, - "Ġundergrad": 14295, - "ouncer": 14296, - "uffy": 14297, - "Ġconversion": 14298, - "Ġdisconnect": 14299, - "Ġecho": 14300, - "omer": 14301, - "Ġcurriculum": 14302, - "Ġperché": 14303, - "Ġwand": 14304, - "..?": 14305, - "Ġrolled": 14306, - "Ġentrepreneur": 14307, - "Ġtheoret": 14308, - "ĠÑīо": 14309, - "Ġinsights": 14310, - "Ġzusammen": 14311, - "oin": 14312, - "rett": 14313, - "produ": 14314, - "Ġvisitors": 14315, - "eous": 14316, - "Ġgrandmother": 14317, - "Ġhumor": 14318, - "ĠниÑħ": 14319, - "zenia": 14320, - "inson": 14321, - "Ġreset": 14322, - "Ġbaseball": 14323, - "Ġmatching": 14324, - "ëĭ¤ê°Ģ": 14325, - "Ġpunto": 14326, - "ì¡": 14327, - "Ġrede": 14328, - "Ġaddressing": 14329, - "Ġforecast": 14330, - "ĠBol": 14331, - "Ġcolored": 14332, - "Ġdocumentation": 14333, - "Ġexpectation": 14334, - "ĠNorthern": 14335, - "Ġcreo": 14336, - "Ġà®ļ": 14337, - "fon": 14338, - "Ġunsere": 14339, - "UM": 14340, - "Ġcopies": 14341, - "Ġexpanded": 14342, - "Ġveterans": 14343, - "ĠAlm": 14344, - "ĠвообÑīе": 14345, - "Ġpsychological": 14346, - "Ġnosso": 14347, - "Ġpayments": 14348, - "imeters": 14349, - "Ġ-->": 14350, - "ĠJennifer": 14351, - "Ġvolunteers": 14352, - "osse": 14353, - "orious": 14354, - "ĠбÑĭли": 14355, - "èĤ": 14356, - "ĠEss": 14357, - "ws": 14358, - "ĠBC": 14359, - "ĠIC": 14360, - "Woman": 14361, - "Ġvont": 14362, - "Ġethnic": 14363, - "ENN": 14364, - "имо": 14365, - "Ġlob": 14366, 
- "Ġoui": 14367, - "cs": 14368, - "Ġrehe": 14369, - "Ġìłģ": 14370, - "Ġchick": 14371, - "úsica": 14372, - "Ġkont": 14373, - "ĠDistrict": 14374, - "Ġpile": 14375, - "Ġав": 14376, - "ейÑģÑĤв": 14377, - "Ġ£": 14378, - "Ġissued": 14379, - "Ġкомп": 14380, - "Ġprosper": 14381, - "Ġprofound": 14382, - "ĠDear": 14383, - "Ġãģĵ": 14384, - "Ġfunded": 14385, - "Ġbisa": 14386, - "ŀĺë": 14387, - "ף": 14388, - "ĠìĿĺ": 14389, - "Ġtwelve": 14390, - "ĠChampions": 14391, - "éĿŀ常": 14392, - "Ñģл": 14393, - "Ġ2005": 14394, - "pm": 14395, - "Ġonde": 14396, - "Ġdiffé": 14397, - "ĠChall": 14398, - "Ġdifficulties": 14399, - "Ġgarage": 14400, - "Ġdá": 14401, - "ünk": 14402, - "Ġ물": 14403, - "Ġtran": 14404, - "Ġsubmitted": 14405, - "zw": 14406, - "ÙĪا": 14407, - "Ġark": 14408, - "ĠìĦ±": 14409, - "Ġgrocery": 14410, - "она": 14411, - "iere": 14412, - "Ġaest": 14413, - "Ġexhibition": 14414, - "Ġrés": 14415, - "Ġconsistency": 14416, - "Ġcookie": 14417, - "ней": 14418, - "Ġreplacement": 14419, - "æ²¹": 14420, - "ĠSem": 14421, - "ĠìĤ¬ìļ©": 14422, - "800": 14423, - "Ġgenes": 14424, - "Ġtransaction": 14425, - "ĠEL": 14426, - "Ġdurante": 14427, - "ibles": 14428, - "ĠEat": 14429, - "tail": 14430, - "issance": 14431, - "Ġtoss": 14432, - "Ġsurvived": 14433, - "Ġoffices": 14434, - "Ġsupportive": 14435, - "Where": 14436, - "Ġtoutes": 14437, - "Ġë§ī": 14438, - "Ġjokes": 14439, - "ieron": 14440, - "apers": 14441, - "Ġmature": 14442, - "ĠMarsh": 14443, - "Ġsido": 14444, - "kind": 14445, - "Ġrealmente": 14446, - "ĠChef": 14447, - "Ġquelque": 14448, - "Ġjudges": 14449, - "eft": 14450, - "ERS": 14451, - "Ġjet": 14452, - "Ġpersons": 14453, - "è»": 14454, - "izations": 14455, - "rik": 14456, - "Ġshops": 14457, - "ĠWy": 14458, - "Ġeleg": 14459, - "què": 14460, - "quoi": 14461, - "Ġjuga": 14462, - "Ġíķľë²Ī": 14463, - "ĠQuestion": 14464, - "ĠGlobal": 14465, - "Ġìķ½ê°Ħ": 14466, - "ĠStation": 14467, - "æİ¥": 14468, - "ĠOhio": 14469, - "Ġsticky": 14470, - "Ġstressed": 14471, - "Ġgün": 14472, - "ĠíĿ": 14473, - "ÑģÑĤÑĥп": 14474, - "é¡Į": 14475, - "ĠPhD": 14476, - "immer": 14477, - "Ġmentor": 14478, - "Ġinvented": 14479, - "Ġreun": 14480, - "Ġinevit": 14481, - "ĠpolÃŃt": 14482, - "Ġexecute": 14483, - "ĠStory": 14484, - "Ġoutstanding": 14485, - "Ġguer": 14486, - "ĠRain": 14487, - "Ġchoses": 14488, - "ĠTit": 14489, - "ĠÑģеÑĢ": 14490, - "ĠSingapore": 14491, - "ĠNone": 14492, - "Ġchronic": 14493, - "°ëį°": 14494, - "Ġego": 14495, - "æł·": 14496, - "EST": 14497, - "ãģĤãĤĬ": 14498, - "ĠWang": 14499, - "ĠNAT": 14500, - "Ġaug": 14501, - "Ġdesktop": 14502, - "Ġeternal": 14503, - "ĠìĤ¬ìĭ¤": 14504, - "ĠConstitution": 14505, - "ìĤ¬ë": 14506, - "×Ļ׾": 14507, - "pres": 14508, - "ĠТÑĭ": 14509, - "Ġinterf": 14510, - "Ġlists": 14511, - "Ġfights": 14512, - "ften": 14513, - "ĠIowa": 14514, - "Ġmotivated": 14515, - "ĠHosp": 14516, - "Ġelsewhere": 14517, - "Ġpaths": 14518, - "Ġinstances": 14519, - "Bl": 14520, - "range": 14521, - "á»±": 14522, - "ĠSit": 14523, - "mana": 14524, - "Ġìĭľìŀij": 14525, - "Ġmình": 14526, - "ansas": 14527, - "Ġsna": 14528, - "Ġphilosoph": 14529, - "Ġpasse": 14530, - "Æ°á»Ŀi": 14531, - "akh": 14532, - "ental": 14533, - "Ġihn": 14534, - "ructor": 14535, - "ĠваÑĪ": 14536, - "Ġgenerous": 14537, - "Ġpivot": 14538, - "пол": 14539, - "Ġjamais": 14540, - "Ġcoment": 14541, - "ĠLew": 14542, - "odzi": 14543, - "ĠXbox": 14544, - "Ġвод": 14545, - "Ġconsent": 14546, - "īìŀ¥": 14547, - "Ġdispar": 14548, - "lass": 14549, - "ĠGovernor": 14550, - "Beifall": 14551, - "Ġê°ľ": 14552, - "Ġbeloved": 14553, - "׳×ķ": 14554, - "sell": 14555, - "Ġhonored": 14556, - 
"leh": 14557, - "Ġwäre": 14558, - "unting": 14559, - "Ġfraud": 14560, - "ĠRAM": 14561, - "걸": 14562, - "Ġkills": 14563, - "Ġeconomics": 14564, - "04": 14565, - "пеÑĢ": 14566, - "Ġcoisas": 14567, - "ĠигÑĢ": 14568, - "ÃŃm": 14569, - "Ġmöchte": 14570, - "Ġìµľ": 14571, - "Ġstimul": 14572, - "Ġfastest": 14573, - "lv": 14574, - "Ġgén": 14575, - "ĠSounds": 14576, - "Ġ1970": 14577, - "Ġhomework": 14578, - "speaking": 14579, - "Ġencouraging": 14580, - "Ġquery": 14581, - "Ġrevers": 14582, - "profit": 14583, - "Ġdy": 14584, - "Ġìŀij": 14585, - "ëĬĶëį°ìļĶ": 14586, - "Ġsoap": 14587, - "ĠGall": 14588, - "ĠCN": 14589, - "ĠAns": 14590, - "Ġfic": 14591, - "anks": 14592, - "Ġdessert": 14593, - "ĠìłĢíĿ¬": 14594, - "ĠMaking": 14595, - "Ġcomeç": 14596, - "ê³Ħ": 14597, - "Ġassociation": 14598, - "Dad": 14599, - "hee": 14600, - "Ġhogy": 14601, - "Ġapro": 14602, - "Ġinvisible": 14603, - "American": 14604, - "íİ": 14605, - "Ġvibe": 14606, - "Ġemissions": 14607, - "Ġadvocate": 14608, - "Ġkicked": 14609, - "Ġvel": 14610, - "Ġsummar": 14611, - "Ġfreaking": 14612, - "chron": 14613, - "Ġpinch": 14614, - "Ġwszystk": 14615, - "iscal": 14616, - "Ġproved": 14617, - "Ġmindful": 14618, - "Ġtä": 14619, - "Ġnoises": 14620, - "Ġisolated": 14621, - "Ġcrossed": 14622, - "Ġê°ķ": 14623, - "ĠvoilÃł": 14624, - "Ġchore": 14625, - "ĠRA": 14626, - "Com": 14627, - "Ġrelaxed": 14628, - "atro": 14629, - "Ġprevention": 14630, - "Voiceover": 14631, - "OD": 14632, - "ĠCovid": 14633, - "Ġseparation": 14634, - "Ġ-[": 14635, - "иÑĩего": 14636, - "çĻ¼": 14637, - "ĠSD": 14638, - "bleep": 14639, - "Ġindependence": 14640, - "Ġpartial": 14641, - "Ġalgorithms": 14642, - "ĠAnyone": 14643, - "Ġassociate": 14644, - "hum": 14645, - "icular": 14646, - "Ġbạn": 14647, - "Ġbattles": 14648, - "Good": 14649, - "Applause": 14650, - "Ġbastante": 14651, - "Ġadvant": 14652, - "ĠSweet": 14653, - "Ġrefused": 14654, - "ãĤ¸": 14655, - "ĠÑĤебе": 14656, - "plet": 14657, - "Ġencouraged": 14658, - "åĵ¦": 14659, - "Ġmiracle": 14660, - "ĠBun": 14661, - "ĠVar": 14662, - "rimination": 14663, - "elect": 14664, - "ĠMult": 14665, - "Ġdelivering": 14666, - "eing": 14667, - "Ġcm": 14668, - "nehmen": 14669, - "ĠLine": 14670, - "Ġë§Į": 14671, - "enced": 14672, - "ĠSound": 14673, - "ĠContin": 14674, - "ijd": 14675, - "UNG": 14676, - "kle": 14677, - "Ġthreshold": 14678, - "Ġcompact": 14679, - "adt": 14680, - "Ġtoes": 14681, - "ĠPur": 14682, - "owned": 14683, - "mented": 14684, - "Ġdesigning": 14685, - "Ġvaccinated": 14686, - "Ġexhaust": 14687, - "Ġbasics": 14688, - "Ġconsists": 14689, - "ĠGuy": 14690, - "aczy": 14691, - "ĠmÃŃ": 14692, - "won": 14693, - "害": 14694, - "Ġ85": 14695, - "æĤ": 14696, - "Ġmum": 14697, - "Ġignor": 14698, - "Ġprinting": 14699, - "acular": 14700, - "pow": 14701, - "Ġexpanding": 14702, - "Ġgir": 14703, - "ĠCab": 14704, - "íĺ¸": 14705, - "ÑĤÑĮÑģÑı": 14706, - "ĠìŬ룬ë¶Ħ": 14707, - "Ġangles": 14708, - "Ġterminal": 14709, - "ĠWon": 14710, - "ĠInteresting": 14711, - "Ġcrossing": 14712, - "Ġbonds": 14713, - "Ġpueden": 14714, - "Ġorb": 14715, - "ların": 14716, - "Ġcreepy": 14717, - "Ġnutrition": 14718, - "Ġallies": 14719, - "Ġwireless": 14720, - "Ġdesired": 14721, - "Ġcompute": 14722, - "ĠArizona": 14723, - "ĠBeautiful": 14724, - "Ġproduces": 14725, - "Ġnuestro": 14726, - "ted": 14727, - "Ġeligible": 14728, - "ĠÑģоз": 14729, - "icial": 14730, - "ĠHero": 14731, - "Ġconsume": 14732, - "Ġrobots": 14733, - "Ġpurchased": 14734, - "cción": 14735, - "Ġiz": 14736, - "ược": 14737, - "ίναι": 14738, - "ĠØ£ÙĨ": 14739, - "Ġshadows": 14740, - "ĠMedia": 14741, - "Ġprincess": 14742, - 
"Ġklar": 14743, - "Ġwooden": 14744, - "Ġusar": 14745, - "Ġgüzel": 14746, - "Ġslot": 14747, - "rade": 14748, - "ĠëĴ": 14749, - "Ġharmon": 14750, - "Ġingredient": 14751, - "orship": 14752, - "eki": 14753, - "Ġgrandfather": 14754, - "Ġexcitement": 14755, - "Ġpoliticians": 14756, - "..!": 14757, - "Ġouts": 14758, - "Ġseparately": 14759, - "ĠÑıк": 14760, - "ĠWelt": 14761, - "ĠPow": 14762, - "jan": 14763, - "Ġorientation": 14764, - "åıĭ": 14765, - "LC": 14766, - "agem": 14767, - "ÛĮÚº": 14768, - "åIJĹ": 14769, - "Ġbranches": 14770, - "aden": 14771, - "rente": 14772, - "ĠIhr": 14773, - "asm": 14774, - "Ġestão": 14775, - "ĠNic": 14776, - "Ġslave": 14777, - "Ġcompress": 14778, - "crowd": 14779, - "Ġclimbing": 14780, - "ĠManagement": 14781, - "ĠBah": 14782, - "Ġpanic": 14783, - "Ġkor": 14784, - "Ġcooling": 14785, - "Ġbind": 14786, - "Ġзад": 14787, - "Ġrack": 14788, - "Ġentit": 14789, - "Ġsends": 14790, - "Ġyourselves": 14791, - "des": 14792, - "ĠMuslims": 14793, - "Ġíļ": 14794, - "isma": 14795, - "cycle": 14796, - "unkt": 14797, - "ĠCore": 14798, - "Ġinjuries": 14799, - "Ġidentical": 14800, - "каÑı": 14801, - "ĠDeutschland": 14802, - "Ġее": 14803, - "isan": 14804, - "Ġtruc": 14805, - "leton": 14806, - "Ġbackup": 14807, - "Ġultra": 14808, - "Ġabund": 14809, - "illeurs": 14810, - "ĠbyÅĤo": 14811, - "åħĥ": 14812, - "orted": 14813, - "Ġearthqu": 14814, - "Ġкл": 14815, - "Ġobservation": 14816, - "Ġmaintenant": 14817, - "elen": 14818, - "Ġsettled": 14819, - "Ġpela": 14820, - "ĠEconom": 14821, - "ĠÕ": 14822, - "Ġsteering": 14823, - "ĠALL": 14824, - "ĠCher": 14825, - "Ġpatience": 14826, - "ĠSnow": 14827, - "Ġbor": 14828, - "Ġworthy": 14829, - "Ġcái": 14830, - "Ġק": 14831, - "Ġκα": 14832, - "dog": 14833, - "ĠKaren": 14834, - "illes": 14835, - "β": 14836, - "Ġagriculture": 14837, - "×ķף": 14838, - "ĠSean": 14839, - "Ġsensors": 14840, - "íķ´ë": 14841, - "agh": 14842, - "Ġpublicly": 14843, - "Ġpeux": 14844, - "ĠAlexander": 14845, - "Ġpriorit": 14846, - "Ġlazy": 14847, - "ardon": 14848, - "attering": 14849, - "Ġcostume": 14850, - "ست": 14851, - "è¿ĺ": 14852, - "Ġunw": 14853, - "ÐĽ": 14854, - "Ġthickness": 14855, - "quito": 14856, - "gunt": 14857, - "istas": 14858, - "neys": 14859, - "ĠëIJĺê²Į": 14860, - "ĠBrasil": 14861, - "Ġtoken": 14862, - "Ġaffili": 14863, - "lon": 14864, - "ĠfÃ¥r": 14865, - "ĠBeach": 14866, - "Ġwitch": 14867, - "ĠSeven": 14868, - "Ġpant": 14869, - "λλ": 14870, - "Ġcaptain": 14871, - "åĿ": 14872, - "Ġveut": 14873, - "Ġpouvoir": 14874, - "acz": 14875, - "ĠBarb": 14876, - "Ġutility": 14877, - "Ġcontemporary": 14878, - "Ġobtained": 14879, - "Ġpaintings": 14880, - "ear": 14881, - "Ġpean": 14882, - "ĠOg": 14883, - "Ġcust": 14884, - "лем": 14885, - "Ĥĺë": 14886, - "ĠIsso": 14887, - "Ġaconte": 14888, - "ĠTele": 14889, - "ĠAssistant": 14890, - "Ãī": 14891, - "íĸĪìĬµëĭĪëĭ¤": 14892, - "Ġcounts": 14893, - "Ġbuck": 14894, - "ĠDeep": 14895, - "Ġtackle": 14896, - "Ġharsh": 14897, - "Ġdecides": 14898, - "éĹľ": 14899, - ".âĢĭ": 14900, - "éĤĬ": 14901, - "ĠAngel": 14902, - "Ġlaying": 14903, - "Ġcalories": 14904, - "Ġcontrolling": 14905, - "Ġadvantages": 14906, - "ĠÑįÑĤой": 14907, - "Ġapproaching": 14908, - "Ġthreats": 14909, - "akan": 14910, - "ematic": 14911, - "mann": 14912, - "ê³µ": 14913, - "mumbles": 14914, - "ació": 14915, - "Ġmaintaining": 14916, - "Ġfounder": 14917, - "lah": 14918, - "fight": 14919, - "Ġadmitted": 14920, - "âĢ¦.": 14921, - "ķĮ": 14922, - "abol": 14923, - "Ġusage": 14924, - "Ġnonsense": 14925, - "ĠPalest": 14926, - "Ġcontre": 14927, - "ĠDemocratic": 14928, - "ĠER": 14929, - "jekt": 
14930, - "Ġarbit": 14931, - "Ġгол": 14932, - "ĠMichelle": 14933, - "icher": 14934, - "esh": 14935, - "ĠPho": 14936, - "ком": 14937, - "49": 14938, - "ĠEnergy": 14939, - "οÏį": 14940, - "Ġcents": 14941, - "Ġrefers": 14942, - "Ġgospel": 14943, - "ĠSha": 14944, - "ĠShare": 14945, - "×Ļ׳": 14946, - "Ġclinic": 14947, - "ĠëĦ£": 14948, - "Ġequality": 14949, - "ugs": 14950, - "Ġshed": 14951, - "Ġplanes": 14952, - "Ġtoute": 14953, - "reck": 14954, - "Ġstrand": 14955, - "Ġbiology": 14956, - "Ġleague": 14957, - "ĠPok": 14958, - "Ġnúmero": 14959, - "ĠCoast": 14960, - "Ġconsistently": 14961, - "Ġnucle": 14962, - "OOOO": 14963, - "Ġobjet": 14964, - "Ġchor": 14965, - "Ġginger": 14966, - "Ġdabei": 14967, - "Ġcooperation": 14968, - "à¯į.": 14969, - "nten": 14970, - "ç¤": 14971, - "lÃł": 14972, - "ìĸij": 14973, - "rado": 14974, - "Ġpassive": 14975, - "Ġgloves": 14976, - "Ġunderground": 14977, - "Ġlogical": 14978, - "Ġket": 14979, - "Ġfunctionality": 14980, - "¸ë¦¬": 14981, - "Ġportal": 14982, - "eller": 14983, - "×Ļר": 14984, - "ĠTed": 14985, - "ĠGre": 14986, - "IJľ": 14987, - "Ġpersonnel": 14988, - "Ġemerging": 14989, - "ĠFür": 14990, - "Ġmeantime": 14991, - "usalem": 14992, - "ĠClear": 14993, - "Ġtrapped": 14994, - "Ġìļ°": 14995, - "Ġdispl": 14996, - "Ġmettre": 14997, - "Ġmunicip": 14998, - "Ġwithdraw": 14999, - "Ġspat": 15000, - "unes": 15001, - "Ġaccessibility": 15002, - "æĪij们": 15003, - "Ġapare": 15004, - "Ġprospect": 15005, - "Ġназ": 15006, - "Ġcopper": 15007, - "ĠPRO": 15008, - "ÏħÏĦ": 15009, - "Ġattacking": 15010, - "ĠVin": 15011, - "ĠStone": 15012, - "Ġinvestigate": 15013, - "style": 15014, - "Ġλ": 15015, - "ë¡Ŀ": 15016, - "ë§Ī": 15017, - "Ġinspect": 15018, - "Ġliver": 15019, - "алиÑģÑĮ": 15020, - "Ġsera": 15021, - "halten": 15022, - "eman": 15023, - "Ġministry": 15024, - "''": 15025, - "Ġdots": 15026, - "ãħĭãħĭãħĭãħĭ": 15027, - "ÑĥÑģÑĤ": 15028, - "ĠJak": 15029, - "AKE": 15030, - "Ġgaps": 15031, - "ucker": 15032, - "ĠинÑĤеÑĢеÑģ": 15033, - "ĠEmily": 15034, - "Ġinterval": 15035, - "Ġtender": 15036, - "ĠTechnology": 15037, - "game": 15038, - "Ġtrib": 15039, - "ÙĦا": 15040, - "ĠDevelopment": 15041, - "Ùħا": 15042, - "Ġwrist": 15043, - "Ġfires": 15044, - "Ġtargeted": 15045, - "ìłIJ": 15046, - "Ġsod": 15047, - "íļĮ": 15048, - "ĠolduÄŁ": 15049, - "Ġseasons": 15050, - "ventions": 15051, - "Ġнего": 15052, - "Ġsometime": 15053, - "лив": 15054, - "né": 15055, - "Ġtú": 15056, - "ĠDeus": 15057, - "Ġexecution": 15058, - "áp": 15059, - "ĠChange": 15060, - "ĠIndeed": 15061, - "Ġregulation": 15062, - "ĠHung": 15063, - "éis": 15064, - "Ġwishes": 15065, - "Ġjazz": 15066, - "Ġstructural": 15067, - "Ġblowing": 15068, - "ĠbyÄĩ": 15069, - "Ġthermal": 15070, - "phant": 15071, - "ÑĢÑĥз": 15072, - "анÑĤ": 15073, - "ĠPull": 15074, - "Ġconfusion": 15075, - "нÑĭми": 15076, - "Ġscenarios": 15077, - "ìłģìľ¼ë¡ľ": 15078, - "ĠдеÑĤ": 15079, - "Ġtattoo": 15080, - "Ġautre": 15081, - "Ġheating": 15082, - "Ġtreating": 15083, - "Ġпоним": 15084, - "Ġexclus": 15085, - "ĠLOL": 15086, - "wear": 15087, - "agle": 15088, - "Ġzurück": 15089, - "Ġrational": 15090, - "su": 15091, - "Ġdeter": 15092, - "ĠNative": 15093, - "à®ķள": 15094, - "ached": 15095, - "Ġãĥ": 15096, - "ĠEntonces": 15097, - "Ġhora": 15098, - "ìĿ´ìĹIJìļĶ": 15099, - "Ġlite": 15100, - "ë": 15101, - "Ġsixth": 15102, - "Ġболее": 15103, - "actor": 15104, - "Ġpsychology": 15105, - "缸": 15106, - "Ġdemands": 15107, - "Ġpeer": 15108, - "Ġnewly": 15109, - "ĠWWE": 15110, - "Donald": 15111, - "ĠBox": 15112, - "Ġpine": 15113, - "Ġloading": 15114, - "ĠNico": 15115, - "ĠsÅĤ": 15116, - "omme": 
15117, - "ART": 15118, - "Ġrecruit": 15119, - "Ġbugs": 15120, - "arents": 15121, - "ĠпÑĢоб": 15122, - "ĠInside": 15123, - "ipper": 15124, - "dramatic": 15125, - "Ġplanets": 15126, - "orde": 15127, - "Ġyoga": 15128, - "child": 15129, - "ĠMarie": 15130, - "ĠãģĤ": 15131, - "ĠBL": 15132, - "Ġfilmed": 15133, - "Ġrefresh": 15134, - "Ġtomatoes": 15135, - "Ġfet": 15136, - "Qué": 15137, - "Ġ!!": 15138, - "ĠëĤ´ë": 15139, - "rine": 15140, - "Ġinteractive": 15141, - "sal": 15142, - "annah": 15143, - "pez": 15144, - "ç¶ĵ": 15145, - "Ġunderstands": 15146, - "ĠTokyo": 15147, - "Ġlibraries": 15148, - "Ġreader": 15149, - "ijIJ": 15150, - "oz": 15151, - "ĠEnde": 15152, - "ĠFlo": 15153, - "Ġmild": 15154, - "Ġpoetry": 15155, - "Ġжив": 15156, - "æĦĽ": 15157, - "Ġbehave": 15158, - "Ġdoen": 15159, - "ĠSusan": 15160, - "page": 15161, - "raham": 15162, - "Ġcommunications": 15163, - "Ġtuning": 15164, - "Ġpac": 15165, - "Ġanxious": 15166, - "IO": 15167, - "Mark": 15168, - "Ġhiç": 15169, - "books": 15170, - "Ġpiss": 15171, - "Ġenabled": 15172, - "achelor": 15173, - "ĠFOR": 15174, - "Ġéc": 15175, - "ĠTR": 15176, - "ilst": 15177, - "hat": 15178, - "ĠìĿĮ": 15179, - "Ġtych": 15180, - "Ġjar": 15181, - "Ġbuilds": 15182, - "ĠArgent": 15183, - "Ġintermedi": 15184, - "Ġlou": 15185, - "Ġara": 15186, - "Ġassignment": 15187, - "Ġcabinet": 15188, - "Ġretirement": 15189, - "ãģ»": 15190, - "Ġdisabled": 15191, - "rica": 15192, - "Ġawards": 15193, - "Ġboots": 15194, - "Ġacknowled": 15195, - "Ġthy": 15196, - "Ġ구": 15197, - "Ġsynd": 15198, - "ний": 15199, - "ilton": 15200, - "Ġprobl": 15201, - "ĠFal": 15202, - "Ġverdade": 15203, - "Ġ700": 15204, - "ĠLearning": 15205, - "ocus": 15206, - "Ġpalace": 15207, - "Not": 15208, - "tain": 15209, - "cm": 15210, - "Ġmagnet": 15211, - "incoln": 15212, - "Ġfiguring": 15213, - "ĠLyn": 15214, - "ĠBoss": 15215, - "ĠVO": 15216, - "Ġdiagnosis": 15217, - "Ġequipped": 15218, - "watch": 15219, - "inos": 15220, - "aders": 15221, - "Ġshelf": 15222, - "Ġorganis": 15223, - "Ġnod": 15224, - "Ġkız": 15225, - "ppers": 15226, - "Ġrestore": 15227, - "Ġartic": 15228, - "ĠVoice": 15229, - "ıyorum": 15230, - "격": 15231, - "Ġspreading": 15232, - "Ġhips": 15233, - "Ġward": 15234, - "ureau": 15235, - "Ġintersection": 15236, - "66": 15237, - "Ġ39": 15238, - "ç³": 15239, - "Ġwaited": 15240, - "ì´": 15241, - "hhhh": 15242, - "Ġdys": 15243, - "ĠEN": 15244, - "Ġbatch": 15245, - "Ġcaf": 15246, - "Ġmarker": 15247, - "大家好": 15248, - "orable": 15249, - "ória": 15250, - "Ġstepped": 15251, - "Ġcelebrating": 15252, - "ана": 15253, - "Ġworn": 15254, - "ĠFol": 15255, - "Ġpla": 15256, - "Ġattempts": 15257, - "Ġtweet": 15258, - "Ġrust": 15259, - "gence": 15260, - "íĨµ": 15261, - "Ġrevel": 15262, - "Ġrecept": 15263, - "eness": 15264, - "Ġ((": 15265, - "ãĥ¼ãĥ": 15266, - "!âĢĭ": 15267, - "ĠìĨIJ": 15268, - "Ġinfluenced": 15269, - "иж": 15270, - "ĠконеÑĩно": 15271, - "Ġcolleges": 15272, - "ioni": 15273, - "Ġsag": 15274, - "Ann": 15275, - "olar": 15276, - "Ġexpressions": 15277, - "Ġsuits": 15278, - "Ġownership": 15279, - "eland": 15280, - "piece": 15281, - "æĢİä¹Ī": 15282, - "Ġdespués": 15283, - "Ġtel": 15284, - "Ġinsult": 15285, - "Ġêµīìŀ¥": 15286, - "ĠSmall": 15287, - "ĠFR": 15288, - "oka": 15289, - "berries": 15290, - "ĠAnton": 15291, - "елÑı": 15292, - "ÑıÑģ": 15293, - "Ġvalve": 15294, - "acts": 15295, - "Ġwoods": 15296, - "ண": 15297, - "Ġcultiv": 15298, - "Ġfá": 15299, - "ãģ¨ãģĦãģĨ": 15300, - "Ġcheers": 15301, - "Ġassumption": 15302, - "Ġfitness": 15303, - "ÃŃcul": 15304, - "Ġpodr": 15305, - "Ġweit": 15306, - "ĠHind": 15307, - "Ġdign": 
15308, - "Ġзн": 15309, - "Ġsquad": 15310, - "Ġdestro": 15311, - "cere": 15312, - "shirt": 15313, - "immt": 15314, - "engers": 15315, - "Ġsä": 15316, - "kÅĤad": 15317, - "ĠÈĻ": 15318, - "Ġoccas": 15319, - "Ġì¤Ħ": 15320, - "Ġprocessor": 15321, - "ĠDM": 15322, - "ĠDaddy": 15323, - "Ġsooner": 15324, - "Ġstraightforward": 15325, - "Ġdepartments": 15326, - "ĠChrome": 15327, - "Ġworkplace": 15328, - "ĠPython": 15329, - "Ġmeng": 15330, - "ĠDAN": 15331, - "ĠIce": 15332, - "ĠëĪĪ": 15333, - "ĠGi": 15334, - "Ġhiring": 15335, - "Ġlanded": 15336, - "Ġdemocratic": 15337, - "iedz": 15338, - "ãģĺãĤĥ": 15339, - "Ġsev": 15340, - "icia": 15341, - "Ġespecial": 15342, - "ĠNous": 15343, - "Ġhät": 15344, - "Ġbou": 15345, - "pert": 15346, - "iesz": 15347, - "åijĢ": 15348, - "Ġvil": 15349, - "ÅĽli": 15350, - "Ġîn": 15351, - "Ġlosses": 15352, - "éķ·": 15353, - "Ġtoast": 15354, - "Ġrealm": 15355, - "ĠAustin": 15356, - "ĠInformation": 15357, - "Ġresume": 15358, - "Ġchase": 15359, - "Ġsalary": 15360, - "Ġë¶Ħ": 15361, - "лиÑĩ": 15362, - "ĠÑģлед": 15363, - "ĠFurther": 15364, - "Ġcaring": 15365, - "Ġvig": 15366, - "Ġvalor": 15367, - "è¿Ļ个": 15368, - "ĠÑĩа": 15369, - "Ġanalytics": 15370, - "Ġglobe": 15371, - "ĠMAN": 15372, - "Ġnel": 15373, - "ìĿ´ìķ¼": 15374, - "Ł¼": 15375, - "Ġoy": 15376, - "íķĺìĦ¸ìļĶ": 15377, - "jen": 15378, - "Ġtroubles": 15379, - "ahaha": 15380, - "Ġchurches": 15381, - "uet": 15382, - "Ġmeasurements": 15383, - "bil": 15384, - "ì½": 15385, - "ifully": 15386, - "инÑĥ": 15387, - "ĠWilson": 15388, - "¦´": 15389, - "ĠíĮĮ": 15390, - "Ġì°¨": 15391, - "Ġpúblic": 15392, - "ĠJerusalem": 15393, - "Ġnails": 15394, - "Ġspine": 15395, - "Ġhemos": 15396, - "Ġzn": 15397, - "quis": 15398, - "ĠLeben": 15399, - "Ġreferences": 15400, - "ITH": 15401, - "iper": 15402, - "ĠÑģебÑı": 15403, - "ìģ": 15404, - "ĠWa": 15405, - "state": 15406, - "§Ŀ": 15407, - "åħ±": 15408, - "ĠGener": 15409, - "Ġactress": 15410, - "ĠEnjoy": 15411, - "à¹ĥ": 15412, - "Ġ×Ĵ": 15413, - "Ġinfected": 15414, - "Ġshaking": 15415, - "Ġnick": 15416, - "ุ": 15417, - "Ġfot": 15418, - "Ġaccomplished": 15419, - "uke": 15420, - "Ġsheets": 15421, - "Ġfence": 15422, - "Ġnursing": 15423, - "Ġintroducing": 15424, - "Ġfeat": 15425, - "One": 15426, - "TO": 15427, - "Ġclubs": 15428, - "ĠBruce": 15429, - "onge": 15430, - "change": 15431, - "ĠBatman": 15432, - "åı°": 15433, - "ĠOfficer": 15434, - "Ġhydro": 15435, - "Ġsupplement": 15436, - "Ġcela": 15437, - "Ġlongest": 15438, - "Ġcompeting": 15439, - "Ġconhe": 15440, - "giving": 15441, - "Ġbrains": 15442, - "Ġloans": 15443, - "Ġwage": 15444, - "ĠClinton": 15445, - "ĠsÄĥ": 15446, - "aneous": 15447, - "Ġlord": 15448, - "ÑĢÑĥж": 15449, - "Ġquiz": 15450, - "Ġstiff": 15451, - "ĠLGB": 15452, - "sz": 15453, - "ME": 15454, - "mare": 15455, - "there": 15456, - "Ġnär": 15457, - "ĠMand": 15458, - "last": 15459, - "Ġdag": 15460, - "Ġhalfway": 15461, - "ĠBand": 15462, - "Ġëĭ¤ìĭľ": 15463, - "ĠAren": 15464, - "Ġile": 15465, - "PN": 15466, - "ento": 15467, - "Ġalgum": 15468, - "Ġsoccer": 15469, - "Ġblocked": 15470, - "ĠJonathan": 15471, - "Ġsew": 15472, - "ĠTestament": 15473, - "Ġvale": 15474, - "Ġbehavi": 15475, - "å§ĭ": 15476, - "Ġconna": 15477, - "ICH": 15478, - "Ġaudiences": 15479, - "ml": 15480, - "ammad": 15481, - "ĠìĤ´ì": 15482, - "IGH": 15483, - "Ġraces": 15484, - "emed": 15485, - "Ġmá»Ļt": 15486, - "ï": 15487, - "Ġovers": 15488, - "Ġdeclared": 15489, - "Ġsana": 15490, - "ĠUna": 15491, - "ĠÑĢе": 15492, - "ucks": 15493, - "Ġpairs": 15494, - "Ġange": 15495, - "Ne": 15496, - "Ġups": 15497, - "avy": 15498, - "ør": 15499, - "reek": 
15500, - "Ġbehaviors": 15501, - "Ġreflected": 15502, - "Ġpriorities": 15503, - "Ġcondu": 15504, - "Ġretreat": 15505, - "Ġexpenses": 15506, - "Ġë´IJ": 15507, - "Ġtriple": 15508, - "Ġêµīìŀ¥íŀĪ": 15509, - "ält": 15510, - "Ġindigenous": 15511, - "Ġmining": 15512, - "Ġacceptable": 15513, - "Ġruin": 15514, - "CA": 15515, - "uine": 15516, - "Ġpipeline": 15517, - "ctic": 15518, - "êt": 15519, - "ĠвÑģего": 15520, - "Ġboun": 15521, - "ĠDigital": 15522, - "ĠBoom": 15523, - "ÑĨе": 15524, - "ĠлÑĥÑĩ": 15525, - "Ġasc": 15526, - "ĮĢë¡ľ": 15527, - "ĠGoodbye": 15528, - "Ġrender": 15529, - "enez": 15530, - "arre": 15531, - "ĠTHAT": 15532, - "bour": 15533, - "ición": 15534, - "ãĤŃ": 15535, - "Every": 15536, - "Ġwires": 15537, - "ĠParliament": 15538, - "nung": 15539, - "ateur": 15540, - "ĠSave": 15541, - "ĠPhys": 15542, - "Ġamor": 15543, - "ĠEve": 15544, - "Ġfright": 15545, - "Ġgamma": 15546, - "Ġmicros": 15547, - "mitt": 15548, - "ĠCode": 15549, - "ĠBey": 15550, - "pled": 15551, - "ĠиÑģполÑĮз": 15552, - "çĹ": 15553, - "ìĥī": 15554, - "她": 15555, - "Ġmonet": 15556, - "ĠJahre": 15557, - "Ġluxury": 15558, - "Ġdeaf": 15559, - "Ġbetray": 15560, - "Ġê²°": 15561, - "ики": 15562, - "Ġdefeated": 15563, - "Ġundert": 15564, - "Ġweg": 15565, - "Ġcooler": 15566, - "ãģķãĤĵ": 15567, - "iami": 15568, - "éĤĦæľī": 15569, - "ĠJessica": 15570, - "ĠJoy": 15571, - "Ġsophistic": 15572, - "ении": 15573, - "ðĿĺ": 15574, - "Ġchili": 15575, - "ĠType": 15576, - "Ġproteins": 15577, - "Ġpresenting": 15578, - "alia": 15579, - "ìļ¸": 15580, - "ĠMajor": 15581, - "Ġmolecule": 15582, - "umer": 15583, - "Ġcollapse": 15584, - "ĠAnyways": 15585, - "ĠMountain": 15586, - "anted": 15587, - "ãĢIJ": 15588, - "Ġвидео": 15589, - "æ°´": 15590, - "Aud": 15591, - "Ġconqu": 15592, - "Ġvoll": 15593, - "Ġknit": 15594, - "Ġmembr": 15595, - "ĠMarket": 15596, - "Ġdari": 15597, - "Ġcalculated": 15598, - "ги": 15599, - "Ġshrimp": 15600, - "ĠMu": 15601, - "ĠпÑĢоÑĤ": 15602, - "Ġìĺģìĥģ": 15603, - "Ġproductivity": 15604, - "Ġcognitive": 15605, - "ĠHeb": 15606, - "ictions": 15607, - "ê²½": 15608, - "Ġcré": 15609, - "för": 15610, - "Ġpraying": 15611, - "ashi": 15612, - "ĠTik": 15613, - "ór": 15614, - "wen": 15615, - "ÑĮÑİ": 15616, - "ixo": 15617, - "Ġ(\"": 15618, - "ĠÑĤел": 15619, - "Ġìĸ´ëĸ¤": 15620, - "ĠпеÑĢед": 15621, - "ĠDrive": 15622, - "ãĢij": 15623, - "ĠEqu": 15624, - "Ġequilibrium": 15625, - "Ġdescribes": 15626, - "нее": 15627, - "42": 15628, - "ĠCurrent": 15629, - "yy": 15630, - "Ġabsorb": 15631, - "Ġsoldier": 15632, - "ders": 15633, - "Ġtestimony": 15634, - "Ġdecline": 15635, - "ľë¡ľ": 15636, - "gage": 15637, - "Ġinspire": 15638, - "lapping": 15639, - "Ġspinning": 15640, - "Ġslavery": 15641, - "Ġfacial": 15642, - "Ġtraditions": 15643, - "ários": 15644, - "ĠHospital": 15645, - "Ġnest": 15646, - "ĠëĪĦ": 15647, - "Ġtoi": 15648, - "Ġfears": 15649, - "ìħ¨": 15650, - "ĠMuh": 15651, - "Ġgraduation": 15652, - "Ġimpacted": 15653, - "Ġaunt": 15654, - "ĠLets": 15655, - "Ġaluminum": 15656, - "Ġdominant": 15657, - "ĠDavis": 15658, - "ĠNavy": 15659, - "Ġcompt": 15660, - "oples": 15661, - "Ġestava": 15662, - "è¥": 15663, - "Ġscal": 15664, - "Ġpreserve": 15665, - "ĠOpp": 15666, - "Ġpractically": 15667, - "Ġmagnitude": 15668, - "Ġfitting": 15669, - "Ġcoordinate": 15670, - "Ġfurniture": 15671, - "ĠFamil": 15672, - "Ġexplosion": 15673, - "Ġdocumentary": 15674, - "ĠScript": 15675, - "Ġportray": 15676, - "mat": 15677, - "Ġscheduled": 15678, - "Ġdynamics": 15679, - "phy": 15680, - "aky": 15681, - "ĠUI": 15682, - "Che": 15683, - "Ġcontinuously": 15684, - "ĠProv": 15685, - "å°ij": 
15686, - "Ñĥз": 15687, - "rah": 15688, - "Ġgerne": 15689, - "proof": 15690, - "Ġsecretary": 15691, - "ĠPatreon": 15692, - "scream": 15693, - "ĠKids": 15694, - "á»ĵi": 15695, - "Ġkg": 15696, - "Ġuncertainty": 15697, - "Ġкажд": 15698, - "Ġmitig": 15699, - "Ġreads": 15700, - "å·²": 15701, - "ĠRu": 15702, - "Ġpriest": 15703, - "Ġнед": 15704, - "Ġlimitations": 15705, - "Ġfloat": 15706, - "600": 15707, - "ĠToy": 15708, - "ĠJimmy": 15709, - "Ġoffensive": 15710, - "eni": 15711, - "ĠXi": 15712, - "Ġeyebr": 15713, - "ĠTurk": 15714, - "Ġaccidentally": 15715, - "Ġohne": 15716, - "ĠSaud": 15717, - "95": 15718, - "ĠDutch": 15719, - "анÑģ": 15720, - "ĠSeattle": 15721, - "Ġëĵ±": 15722, - "check": 15723, - "kÄĻ": 15724, - "Ġcontributions": 15725, - "Ġbeside": 15726, - "Ġquindi": 15727, - "Ġflew": 15728, - "æŶ": 15729, - "ذا": 15730, - "ĠLO": 15731, - "Ġwaist": 15732, - "ĠEV": 15733, - "Ġholidays": 15734, - "jon": 15735, - "Ġmisunder": 15736, - "Ñıн": 15737, - "Ġbout": 15738, - "Ġdimin": 15739, - "ẽ": 15740, - "ól": 15741, - "ĠGrace": 15742, - "Ġinputs": 15743, - "Ġdeny": 15744, - "Ġforming": 15745, - "ĠBild": 15746, - "Ġadequ": 15747, - "Ġfolk": 15748, - "Ġrejected": 15749, - "semb": 15750, - "Ġfrustrated": 15751, - "open": 15752, - "ĠBetter": 15753, - "ilon": 15754, - "Ġtowel": 15755, - "Ġdifferential": 15756, - "Ġsacred": 15757, - "Ġsail": 15758, - "éĩĮ": 15759, - "entimes": 15760, - "Ġgentleman": 15761, - "Ġiconic": 15762, - "Ġcomparing": 15763, - "Ġsagt": 15764, - "Ġtexts": 15765, - "Ġgrandma": 15766, - "Ġrolls": 15767, - "Ġcontents": 15768, - "ä¸į好": 15769, - "оÑģÑģ": 15770, - "Ġsuspension": 15771, - "roit": 15772, - "¦¼": 15773, - "Ġassez": 15774, - "Ġdort": 15775, - "ĠMath": 15776, - "ĠVictor": 15777, - "ĠJavaScript": 15778, - "ä¸įå°į": 15779, - "Ġenhan": 15780, - "ÅĻ": 15781, - "ĠBush": 15782, - "Ġpromotion": 15783, - "Ġkin": 15784, - "Ġmonsters": 15785, - "ĠColorado": 15786, - "Ġβ": 15787, - "íķ´ìļĶ": 15788, - "æŃ£": 15789, - "ifferent": 15790, - "Ġnaked": 15791, - "Ġprod": 15792, - "etics": 15793, - "ĠWoman": 15794, - "Ġtreatments": 15795, - "Ġestoy": 15796, - "vé": 15797, - "Ġlifting": 15798, - "Ġyapt": 15799, - "ĠRober": 15800, - "Ġì¹ľ": 15801, - "Ġsubstitute": 15802, - "aku": 15803, - "ridge": 15804, - "Ġê±°ë": 15805, - "Ġresponded": 15806, - "Ġbé": 15807, - "ĠEngineer": 15808, - "Ġtransferred": 15809, - "ë²": 15810, - "Ġhaber": 15811, - "oop": 15812, - "ĠWE": 15813, - "Ġvest": 15814, - "Ġforty": 15815, - "ĠDS": 15816, - "Ġ2004": 15817, - "Ġcoaching": 15818, - "nom": 15819, - "ĠBab": 15820, - "Ġnossa": 15821, - "ĠJake": 15822, - "Ġgy": 15823, - "Ġdeleg": 15824, - "Ġìŀł": 15825, - "ĠкÑĢаÑģ": 15826, - "Ġstandpoint": 15827, - "Ġdisad": 15828, - "Ġartwork": 15829, - "Ad": 15830, - "illo": 15831, - "ĠÄijược": 15832, - "ĠProm": 15833, - "ĠLib": 15834, - "Ġcriticism": 15835, - "Ġcontacts": 15836, - "ÑĢам": 15837, - "Ġachievement": 15838, - "ÐĶа": 15839, - "Ġdissol": 15840, - "ĠVegas": 15841, - "Ġstreams": 15842, - "ĠKent": 15843, - "ĠعÙĦÙī": 15844, - "Ġradius": 15845, - "Ġsucks": 15846, - "ĠAch": 15847, - "Ġfi": 15848, - "oust": 15849, - "ĠлÑİди": 15850, - "Ġpalette": 15851, - "ĠHaz": 15852, - "ĠAnthony": 15853, - "Ġtema": 15854, - "ĠCos": 15855, - "Ġsafer": 15856, - "αÏĤ": 15857, - "Ġcontrad": 15858, - "Ġmaior": 15859, - "Ġinflation": 15860, - "ĠSilver": 15861, - "Ġattending": 15862, - "íķľíħĮ": 15863, - "arto": 15864, - "Ġapplauding": 15865, - "Ġcomputing": 15866, - "ĠHat": 15867, - "æ»": 15868, - "know": 15869, - "makers": 15870, - "Ġconoc": 15871, - "Ġeducated": 15872, - "Ġmodified": 15873, - 
"Ġinclusion": 15874, - "mental": 15875, - "ŀIJ": 15876, - "isia": 15877, - "ĠÏĢοÏħ": 15878, - "Ġaun": 15879, - "ĠIreland": 15880, - "Ġkö": 15881, - "Ġcompliance": 15882, - "Ġinspiring": 15883, - "иÑĤелÑĮно": 15884, - "Ġdispos": 15885, - "ì°¨": 15886, - "Ġwip": 15887, - "rical": 15888, - "rawd": 15889, - "Ġtres": 15890, - "Ġmobil": 15891, - "olutions": 15892, - "BO": 15893, - "Ġbounce": 15894, - "Ġassumed": 15895, - "ĠMedical": 15896, - "Ġfiscal": 15897, - "ĠngÆ°á»Ŀi": 15898, - "itionally": 15899, - "Ġstolen": 15900, - "ĠBM": 15901, - "Ġmechanisms": 15902, - "εί": 15903, - "Ġqualified": 15904, - "ĠìŀIJë": 15905, - "ughters": 15906, - "ĠHIV": 15907, - "ĠLots": 15908, - "Ġservers": 15909, - "Ġcarr": 15910, - "ĠTogether": 15911, - "Ġattracted": 15912, - "Ġkr": 15913, - "æĪijæĺ¯": 15914, - "thur": 15915, - "inin": 15916, - "ĠHalf": 15917, - "ÈĽ": 15918, - "ĠPap": 15919, - "Ġreminded": 15920, - "ALL": 15921, - "Ġhelmet": 15922, - "Ġbottles": 15923, - "Ġprofessors": 15924, - "Ġseine": 15925, - "ÅĤÄħ": 15926, - "ãĥı": 15927, - "Ġê±°ìķ¼": 15928, - "Ġ×¢×ľ": 15929, - "fun": 15930, - "ĠBird": 15931, - "Ġfighter": 15932, - "ĠëĶ°ë": 15933, - "ĠTool": 15934, - "Ġtin": 15935, - "inois": 15936, - "ë¶Ħ": 15937, - "×Ļף": 15938, - "ĠCAR": 15939, - "åIJį": 15940, - "irsty": 15941, - "Ġoutdoor": 15942, - "ĠNS": 15943, - "ãħİ": 15944, - "ffen": 15945, - "Ġlud": 15946, - "Hello": 15947, - "Ġroller": 15948, - "iele": 15949, - "ĠPoland": 15950, - "Ġapa": 15951, - "exp": 15952, - "Ġcertificate": 15953, - "ĠTown": 15954, - "аÑİÑĤÑģÑı": 15955, - "ilde": 15956, - "Ġdetermin": 15957, - "PR": 15958, - "Ġfreeze": 15959, - "Ġmainstream": 15960, - "Ġobjectives": 15961, - "blo": 15962, - "Ġtakie": 15963, - "åĵĪåĵĪ": 15964, - "Ġë°Ķë¡ľ": 15965, - "elet": 15966, - "ĠIV": 15967, - "ĠFast": 15968, - "Ġdere": 15969, - "emp": 15970, - "ĠDra": 15971, - "ĠìŀĪìĹĪ": 15972, - "Ġdiscrimination": 15973, - "Ġείναι": 15974, - "necess": 15975, - "æ®": 15976, - "ıģı": 15977, - "Ġposting": 15978, - "wiÅĽcie": 15979, - "Ġlub": 15980, - "Ġolive": 15981, - "Ġrim": 15982, - "Ġmodeling": 15983, - "Ġaño": 15984, - "ĠPakistan": 15985, - "Ġoverl": 15986, - "Ġinflam": 15987, - "NE": 15988, - "ìĹIJê²Į": 15989, - "Ġattended": 15990, - "Ġdealt": 15991, - "ĠAlt": 15992, - "ĠLincoln": 15993, - "Ġawake": 15994, - "Ġfilters": 15995, - "ĠWithin": 15996, - "czywiÅĽcie": 15997, - "Ġsû": 15998, - "ĠJohnny": 15999, - "Ġintegrity": 16000, - "Ġisolation": 16001, - "ĠEasy": 16002, - "ĠпÑĢин": 16003, - "ĠAlice": 16004, - "Ġsmiling": 16005, - "enix": 16006, - ",...": 16007, - "ζ": 16008, - "Ġbegun": 16009, - "Ġjewel": 16010, - "Ġconventional": 16011, - "Ġstatist": 16012, - "Ġhanded": 16013, - "Ġirre": 16014, - "Ġprohib": 16015, - "Ġsatellite": 16016, - "é¦Ļ": 16017, - "ĠIndust": 16018, - "Ġtraged": 16019, - "Ġtrava": 16020, - "Ġihm": 16021, - "Ġcruel": 16022, - "ĠAgora": 16023, - "ĠDoc": 16024, - "Ġzones": 16025, - "Ġmall": 16026, - "Ġtray": 16027, - "×ķ׳": 16028, - "Ġirrit": 16029, - "Ġkans": 16030, - "ĠBeat": 16031, - "udge": 16032, - "ielle": 16033, - "Ġtrusted": 16034, - "Ġbikes": 16035, - "ĠÑĥп": 16036, - "ĠMember": 16037, - "wick": 16038, - "Ġcreators": 16039, - "Ġheritage": 16040, - "indistinct": 16041, - "Ġresur": 16042, - "ennen": 16043, - "Come": 16044, - "Ġfiring": 16045, - "ĠBueno": 16046, - "ĠТо": 16047, - "ikan": 16048, - "ettes": 16049, - "Ġkes": 16050, - "Ġtrips": 16051, - "Ġdivorce": 16052, - "ĠKl": 16053, - "Ġconsol": 16054, - "keep": 16055, - "기ê°Ģ": 16056, - "ĠReport": 16057, - "Ġhosting": 16058, - "Ġdiamond": 16059, - "Ġcomplic": 16060, - 
"Ġhelicop": 16061, - "Ġdepuis": 16062, - "ds": 16063, - "ĠChan": 16064, - "Ñıл": 16065, - "Ġscissors": 16066, - "ilation": 16067, - "Ġproportion": 16068, - "ERE": 16069, - "ĠÙĪاÙĦ": 16070, - "inta": 16071, - "Ġmuchas": 16072, - "uation": 16073, - "itis": 16074, - "æĬĬ": 16075, - "ÑıÑī": 16076, - "Ġniin": 16077, - "Ġemphasize": 16078, - "uela": 16079, - "Ġproducers": 16080, - "Ġrze": 16081, - "änder": 16082, - "ETH": 16083, - "æº": 16084, - "Ġconstitu": 16085, - "åĽ½": 16086, - "Ġperformances": 16087, - "istle": 16088, - "gov": 16089, - "ĠLiter": 16090, - "Ġincorporate": 16091, - "Ġeducate": 16092, - "ĠNin": 16093, - "쪽": 16094, - "ÙĩÙħ": 16095, - "eleration": 16096, - "×ķ×ij": 16097, - "ĠyaÅŁ": 16098, - "orous": 16099, - "ĠCas": 16100, - "Ġgrants": 16101, - "ëĬ¥": 16102, - "amel": 16103, - "Ġê·¸ëłĩê²Į": 16104, - "ĠEste": 16105, - "ÑħодиÑĤ": 16106, - "ĠпоÑģле": 16107, - "Ġgent": 16108, - "Ġfocuses": 16109, - "alities": 16110, - "ĠRh": 16111, - "ë³´": 16112, - "æ°ij": 16113, - "ĠDance": 16114, - "rr": 16115, - "Ġamer": 16116, - "Ġutilize": 16117, - "ĠlÃŃ": 16118, - "ĠAmong": 16119, - "Ġpregnancy": 16120, - "Ġloops": 16121, - "алоÑģÑĮ": 16122, - "ĠMoh": 16123, - "Ġcatching": 16124, - "Ġglob": 16125, - "Ġajud": 16126, - "Ġ[?": 16127, - "ĠAnal": 16128, - "looking": 16129, - "Ġsurfaces": 16130, - "Ġprogressive": 16131, - "Ġviral": 16132, - "08": 16133, - "ξ": 16134, - "KA": 16135, - "Ġży": 16136, - "Ġpicks": 16137, - "annon": 16138, - "Ġbulk": 16139, - "ĠRoss": 16140, - "Ġdescribing": 16141, - "ĠGel": 16142, - "Ġlocally": 16143, - "Ġendless": 16144, - "Ġmassage": 16145, - "Ġcleaned": 16146, - "Ġtraveled": 16147, - "енÑĭ": 16148, - "Ġsentiment": 16149, - "igma": 16150, - "ĠNas": 16151, - "Ġchemicals": 16152, - "Ġrighteous": 16153, - "ĠMagic": 16154, - "Ġrelates": 16155, - "Ġtrucks": 16156, - "Ġ1960": 16157, - "åĪ¥": 16158, - "Ġappet": 16159, - "Ġsnacks": 16160, - "ĠSummer": 16161, - "Ġyüz": 16162, - "Ġpris": 16163, - "ĠMexican": 16164, - "Ġtransparen": 16165, - "Ġminority": 16166, - "Ġverte": 16167, - "Ġlassen": 16168, - "46": 16169, - "лек": 16170, - "ép": 16171, - "ĠÑĦилÑĮ": 16172, - "Ġiyi": 16173, - "Ġspan": 16174, - "íķĺì§Ģ": 16175, - "Ġindicated": 16176, - "quar": 16177, - "Ġscholarship": 16178, - "ĠLGBT": 16179, - "Ġhistorically": 16180, - "óÅĤ": 16181, - "Ġminist": 16182, - "Ġpenet": 16183, - "ĠRap": 16184, - "Ġconservation": 16185, - "缴": 16186, - "ĠHoney": 16187, - "ĠBei": 16188, - "idel": 16189, - "Ġresponsibilities": 16190, - "Ġmessy": 16191, - "ĠExcept": 16192, - "ORE": 16193, - "Ġinitiatives": 16194, - "Ġjunior": 16195, - "Ġdesigners": 16196, - "Ġexploration": 16197, - "Ġsponsor": 16198, - "Ġmobility": 16199, - "Ġinteg": 16200, - "lando": 16201, - "Ġbark": 16202, - "Ġindicates": 16203, - "à¶": 16204, - "Ġemployer": 16205, - "å®ī": 16206, - "Ġcousin": 16207, - "Ġboiling": 16208, - "Ġchrom": 16209, - "Ġçal": 16210, - "Ġperpet": 16211, - "Ġcontained": 16212, - "Ġparks": 16213, - "Ы": 16214, - "ĠEngineering": 16215, - "Please": 16216, - "ĠStarting": 16217, - "hero": 16218, - "Ġlawyers": 16219, - "西": 16220, - "Ġzd": 16221, - "Ġfranchise": 16222, - "rage": 16223, - "Ġintuit": 16224, - "ĠGL": 16225, - "reach": 16226, - "ĠElle": 16227, - "ĠnhÆ°": 16228, - "ĠNord": 16229, - "Ġbean": 16230, - "07": 16231, - "Ġpleasant": 16232, - "å½ĵ": 16233, - "viron": 16234, - "Ġgradient": 16235, - "zus": 16236, - "ĠEM": 16237, - "Ġessay": 16238, - "ìĹIJìļĶ": 16239, - "ến": 16240, - "nu": 16241, - "ừ": 16242, - "ĠÃīs": 16243, - "Ġdenomin": 16244, - "ĠGirls": 16245, - "Ġpersonnes": 16246, - "ĠاÙĦØ£": 16247, 
- "bild": 16248, - "ĠStat": 16249, - "Ġcompliment": 16250, - "ĠKate": 16251, - "Ġoptimal": 16252, - "Ġhid": 16253, - "دÙĬ": 16254, - "Ġquicker": 16255, - "wall": 16256, - "En": 16257, - "INE": 16258, - "???": 16259, - "ì²´": 16260, - "ĠAction": 16261, - "åŁ": 16262, - "Ġpenalty": 16263, - "ĠKaz": 16264, - "'?": 16265, - "Ġcried": 16266, - "Ġcanvas": 16267, - "fte": 16268, - "Ġexclud": 16269, - "¸ë¡ľ": 16270, - "Ġemphasis": 16271, - "Ġenzy": 16272, - "ĠHou": 16273, - "Ġoverseas": 16274, - "ÃŃamos": 16275, - "師": 16276, - "öglich": 16277, - "Ġheadphones": 16278, - "cn": 16279, - "ĠAge": 16280, - "Ġakan": 16281, - "Ġcharacteristic": 16282, - "íķĺë©´": 16283, - "gets": 16284, - "Ġë¶Ī": 16285, - "Ġrival": 16286, - "Ġborders": 16287, - "emente": 16288, - "emás": 16289, - "Ġyol": 16290, - "Ġcompe": 16291, - "enders": 16292, - "ından": 16293, - "Ġmöglich": 16294, - "Ġbubbles": 16295, - "natural": 16296, - "Ġarmed": 16297, - "Ġelabor": 16298, - "ĠìĿ´ë²Ī": 16299, - "Ġwashed": 16300, - "οÏħμε": 16301, - "è«ĭ": 16302, - "Ġflavors": 16303, - "Ġexiste": 16304, - "Ġprest": 16305, - "ĠThema": 16306, - "опÑĢоÑģ": 16307, - "eron": 16308, - "UE": 16309, - "eri": 16310, - "Ġconcer": 16311, - "Ġaixò": 16312, - "åħ©": 16313, - "Ġprotective": 16314, - "ĠзнаÑİ": 16315, - "ĠëĤł": 16316, - "ĠIII": 16317, - "Ġmeer": 16318, - "ĠShop": 16319, - "lli": 16320, - "ĠOrder": 16321, - "ĠMY": 16322, - "ĠGhost": 16323, - "ãĤĤãģĨ": 16324, - "adel": 16325, - "Ġstole": 16326, - "Ġreleasing": 16327, - "ĠComment": 16328, - "Ġtrains": 16329, - "ëªħ": 16330, - "Ġwissen": 16331, - "ensed": 16332, - "Ġdescend": 16333, - "Ġfier": 16334, - "Ġradi": 16335, - "Ġpersu": 16336, - "ç¢": 16337, - "Ġмн": 16338, - "ĠDest": 16339, - "Ġworries": 16340, - "itet": 16341, - "bas": 16342, - "Ġstab": 16343, - "name": 16344, - "oric": 16345, - "ĠClose": 16346, - "Ġalumni": 16347, - "ĠSelf": 16348, - "ffe": 16349, - "itating": 16350, - "atherine": 16351, - "ĠRights": 16352, - "Ġellos": 16353, - "Ġwarrant": 16354, - "Ġnerve": 16355, - "Ġvegetable": 16356, - "ĠTeil": 16357, - "Ġê°ĻìĿ´": 16358, - "RY": 16359, - "Ġsustainability": 16360, - "Ġsteht": 16361, - "Ġbrid": 16362, - "adaÅŁ": 16363, - "Ġtv": 16364, - "Ġduration": 16365, - "Ġpessoa": 16366, - "Ġmetrics": 16367, - "Ġadam": 16368, - "cas": 16369, - "аÑĢи": 16370, - "Ġevident": 16371, - "Ġdisplayed": 16372, - "ائ": 16373, - "Ġreck": 16374, - "ĠBuddha": 16375, - "Ġdele": 16376, - "ĠDiego": 16377, - "osph": 16378, - "Ġbla": 16379, - "ĠMik": 16380, - "ulator": 16381, - "Ġ2001": 16382, - "Ġpromoting": 16383, - "ych": 16384, - "ĠEX": 16385, - "Ġlastly": 16386, - "Ġoutline": 16387, - "Ġspirits": 16388, - "Ġveux": 16389, - "Ġsubtract": 16390, - "ĠÅŁimdi": 16391, - "Ġpins": 16392, - "Ġburger": 16393, - "Ġmolto": 16394, - "ĠhabÃŃa": 16395, - "Ġë°ĺ": 16396, - "igu": 16397, - "erst": 16398, - "Ġnen": 16399, - "Ġbacon": 16400, - "itious": 16401, - "Ġcarries": 16402, - "Ġpromises": 16403, - "nde": 16404, - "ĠLeft": 16405, - "ĠLim": 16406, - "æ£": 16407, - "Ġ44": 16408, - "Ġcareers": 16409, - "Ġ주ë": 16410, - "Ġspeeds": 16411, - "qué": 16412, - "mad": 16413, - "market": 16414, - "isme": 16415, - "Ġ2003": 16416, - "Ġrecess": 16417, - "ĠJUD": 16418, - "Ġracist": 16419, - "ĠSchl": 16420, - "Ġparler": 16421, - "Ġotros": 16422, - "ishes": 16423, - "Ġconverted": 16424, - "aaaa": 16425, - "ании": 16426, - "ĠArk": 16427, - "ĠChance": 16428, - "Ġelementary": 16429, - "εν": 16430, - "inks": 16431, - "Interviewer": 16432, - "Ġfreely": 16433, - "alah": 16434, - "Ġëĭ¤ë¥¸": 16435, - "Ġrequested": 16436, - "Ġtorque": 16437, - 
"noÅĽci": 16438, - "oured": 16439, - "ĠStaff": 16440, - "Ġstain": 16441, - "ĠAlan": 16442, - "Ġvere": 16443, - "ĠWinter": 16444, - "Ġdefect": 16445, - "iedy": 16446, - "Ġbeats": 16447, - "Ġhá": 16448, - "umn": 16449, - "oons": 16450, - "itudes": 16451, - "Ġseit": 16452, - "oly": 16453, - "Ġreserv": 16454, - "Ġextr": 16455, - "Ġphysician": 16456, - "visor": 16457, - "Ġhandful": 16458, - "ĠNations": 16459, - "Ġì¢ĭìĿĢ": 16460, - "uccess": 16461, - "Ġupstairs": 16462, - "ĠSquare": 16463, - "Ġhein": 16464, - "ĠSeason": 16465, - "olis": 16466, - "Ġprince": 16467, - "Ġdefensive": 16468, - "ç½": 16469, - "ĠмеÑģÑĤ": 16470, - "Ñĸй": 16471, - "ĠاÙĨ": 16472, - "umble": 16473, - "ê¹ĮìļĶ": 16474, - "Ġassass": 16475, - "Ġcircular": 16476, - "Ġqualities": 16477, - "Ġhmm": 16478, - "Ġblown": 16479, - "ĠLiz": 16480, - "ĠKur": 16481, - "ĠSA": 16482, - "Ġfindings": 16483, - "Ġcolours": 16484, - "Ġdelle": 16485, - "ĠIR": 16486, - "ĠAth": 16487, - "ĠDub": 16488, - "ĠOx": 16489, - "ĠØ®": 16490, - "Ġpockets": 16491, - "Ġgrill": 16492, - "Ġswitching": 16493, - "Ġpreferred": 16494, - "ĠWales": 16495, - "Ġexemplo": 16496, - "Ġchopped": 16497, - "Ġvaccination": 16498, - "Ġneuro": 16499, - "Ġspecify": 16500, - "ivos": 16501, - "Ġserá": 16502, - "Ġzie": 16503, - "Ġà®®": 16504, - "Ġresulting": 16505, - "ĠUgh": 16506, - "Ġmessed": 16507, - "CD": 16508, - "Ġpaar": 16509, - "Ġcomer": 16510, - "Ġcouch": 16511, - "ĠFestival": 16512, - "Ġ49": 16513, - "vous": 16514, - "zens": 16515, - "種": 16516, - "ĠKennedy": 16517, - "ĠTs": 16518, - "Ġë³´ìĹ": 16519, - "Ġdemonstration": 16520, - "Ġunto": 16521, - "Ġfrustrating": 16522, - "Ġlaboratory": 16523, - "Ġegy": 16524, - "Ġbeautifully": 16525, - "Ġìŀ¬ë": 16526, - "Ġalgu": 16527, - "Ġöyle": 16528, - "ä½łçľĭ": 16529, - "ĠPH": 16530, - "Ġfortune": 16531, - "Ġcleaner": 16532, - "ĠRobin": 16533, - "Ġsaus": 16534, - "ĠGeld": 16535, - "Ġkat": 16536, - "obs": 16537, - "Ġolur": 16538, - "Ġmatt": 16539, - "Ġquesta": 16540, - "Ġsuggestion": 16541, - "encer": 16542, - "оÑģÑĤ": 16543, - "Ġradar": 16544, - "Ġìŀ¡": 16545, - "isha": 16546, - "ந": 16547, - "ãĤĵãģª": 16548, - "jes": 16549, - "Ġveel": 16550, - "ìĤ°": 16551, - "Ġauthors": 16552, - "ãĢİ": 16553, - "plan": 16554, - "Ġcollaborative": 16555, - "Ġinstinct": 16556, - "Ġfarming": 16557, - "auge": 16558, - "Edu": 16559, - "Ġmembership": 16560, - "Ġsimultaneously": 16561, - "Ġbake": 16562, - "Ġkä": 16563, - "Ġlectures": 16564, - "ÑĩеÑģ": 16565, - "Ġprendre": 16566, - "Ġcollaps": 16567, - "ĠSaya": 16568, - "ĠFut": 16569, - "Ġyog": 16570, - "ĠRather": 16571, - "رÙĬ": 16572, - "Ġcamps": 16573, - "олод": 16574, - "Ġsimulation": 16575, - "ĠMak": 16576, - "Laughs": 16577, - "Ġgrey": 16578, - "Ġsentences": 16579, - "yen": 16580, - "ĠUnless": 16581, - "Je": 16582, - "ĠSatan": 16583, - "ĠÑĤакже": 16584, - "ĠNA": 16585, - "Ġbron": 16586, - "Ġ?]": 16587, - "Ġsouls": 16588, - "Ġlightning": 16589, - "Ġimagined": 16590, - "Ġczyli": 16591, - "psilon": 16592, - "etta": 16593, - "Ġbelieving": 16594, - "Ġstrongest": 16595, - "ĠCON": 16596, - "Ġquelques": 16597, - "Ġimmigrants": 16598, - "Ġwallet": 16599, - "éĢĻæĺ¯": 16600, - "ĠJersey": 16601, - "Ġimplications": 16602, - "Ġforb": 16603, - "ãĢı": 16604, - "Ġunbelievable": 16605, - "اء": 16606, - "Ġoperational": 16607, - "üs": 16608, - "ĠGM": 16609, - "Ġê·¸ëŁ°ëį°": 16610, - "Ġgracias": 16611, - "Ġentend": 16612, - "ĠRegard": 16613, - "rob": 16614, - "ĠÑĤеÑħ": 16615, - "èı": 16616, - "ĠRevolution": 16617, - "Ġwaar": 16618, - "ĠBiz": 16619, - "theless": 16620, - "Ġsponsored": 16621, - "quier": 16622, - "ĠìĿ¼ë": 16623, 
- "Ġtek": 16624, - "ĠëIJł": 16625, - "igkeit": 16626, - "ĠLuck": 16627, - "ĠCertainly": 16628, - "Ġtoll": 16629, - "ĠниÑĩего": 16630, - "ĠMoney": 16631, - "ĠÑģÑĤоÑĢ": 16632, - "ĠDouble": 16633, - "ĠWolf": 16634, - "Ġchunk": 16635, - "άν": 16636, - "ités": 16637, - "oning": 16638, - "Mar": 16639, - "Ġgrandes": 16640, - "Ġcollections": 16641, - "ĠEuropa": 16642, - "ĠаÑĢ": 16643, - "ĠâĢĭâĢĭâĢĭ": 16644, - "Ġê·¸ëŁ¬ë©´": 16645, - "ĠобÑĬ": 16646, - "Ġãģª": 16647, - "Ġìĭľê°Ħ": 16648, - "ĠCustom": 16649, - "Ġì²ĺ": 16650, - "ÑĸлÑĮ": 16651, - "Ġindividually": 16652, - "íĹ": 16653, - "Ġdozen": 16654, - "Ġowe": 16655, - "ĠVictoria": 16656, - "åı¯èĥ½": 16657, - "Ġbeet": 16658, - "urb": 16659, - "Ġanalog": 16660, - "ição": 16661, - "Ĥľ": 16662, - "soever": 16663, - "Ġmodo": 16664, - "Ġsubscribed": 16665, - "ìŀ¬": 16666, - "Ġentities": 16667, - "çīĩ": 16668, - "Ġcloset": 16669, - "Ġresponding": 16670, - "Ġprinter": 16671, - "ĠStephan": 16672, - "ĠbyÅĤ": 16673, - "ĠDom": 16674, - "ĠFern": 16675, - "ĠPier": 16676, - "ĠwiÄĻc": 16677, - "Ġhence": 16678, - "Ġmodules": 16679, - "ãĥ¬": 16680, - "ĠëĶ±": 16681, - "ĠDanny": 16682, - "ĠÑģебе": 16683, - "Ġvad": 16684, - "ĠìĹĦ": 16685, - "Ġsous": 16686, - "Ġsphere": 16687, - "BY": 16688, - "ĠPed": 16689, - "igned": 16690, - "Ġwheat": 16691, - "Ġunders": 16692, - "Ġevolve": 16693, - "Ġdeclar": 16694, - "Ġlightly": 16695, - "Ġidentifying": 16696, - "æĦıæĢĿ": 16697, - "Ġlegendary": 16698, - "Ġgenuine": 16699, - "Ġgrind": 16700, - "ĠUne": 16701, - "geben": 16702, - "Ġbicy": 16703, - "Ġjumps": 16704, - "Ġprovince": 16705, - "ziÄĻ": 16706, - "Ġ×IJ׳×Ļ": 16707, - "Ġhoc": 16708, - "Ġбл": 16709, - "ĠGrad": 16710, - "Ġrevenge": 16711, - "ĠاÙĦت": 16712, - "ooh": 16713, - "æĭľ": 16714, - "аÑĨии": 16715, - "å¹³": 16716, - "Ġelectro": 16717, - "ĠëIJIJ": 16718, - "ãģ§ãģ¯": 16719, - "Ġfals": 16720, - "riel": 16721, - "oker": 16722, - "ĠExcellent": 16723, - "ĠMorgan": 16724, - "Ġbrick": 16725, - "Ġsubstantial": 16726, - "Ġpollution": 16727, - "ĠTür": 16728, - "ĠEvet": 16729, - "Ġlung": 16730, - "ãģĸ": 16731, - "×Ļש": 16732, - "ommes": 16733, - "Ġrealizing": 16734, - "Ġhumble": 16735, - "ĠLock": 16736, - "Ġbod": 16737, - "Ġìĸ¸": 16738, - "Ġpeers": 16739, - "uzz": 16740, - "Ġembedded": 16741, - "Ġclaro": 16742, - "Ġaggreg": 16743, - "Ġemployers": 16744, - "ĠRaj": 16745, - "Ġãģ¨": 16746, - "ĠYi": 16747, - "Ġjeu": 16748, - "aters": 16749, - "Ġstrikes": 16750, - "nos": 16751, - "autres": 16752, - "dr": 16753, - "opher": 16754, - "ĠApparently": 16755, - "íĺĦ": 16756, - "Ġinfant": 16757, - "اب": 16758, - "ÑĤÑĭ": 16759, - "íĽ": 16760, - "Ú¯": 16761, - "Ġredes": 16762, - "acaģım": 16763, - "ĠDAVID": 16764, - "ĠChicken": 16765, - "Ġperspectives": 16766, - "Ġviewer": 16767, - "Ġshar": 16768, - "ĠпÑĢоиз": 16769, - "ligt": 16770, - "eros": 16771, - "itable": 16772, - "илоÑģÑĮ": 16773, - "ĠdifÃŃ": 16774, - "´ëį°": 16775, - "Ġretired": 16776, - "Ġthats": 16777, - "zenie": 16778, - "beiten": 16779, - "Ġmycket": 16780, - "ĠRab": 16781, - "Ġinflamm": 16782, - "ì°®": 16783, - "Ġdum": 16784, - "Ġdaddy": 16785, - "æľŁ": 16786, - "Ġimmers": 16787, - "Ġplaylist": 16788, - "à¯Ĩ": 16789, - "Ġtraum": 16790, - "Ġrefuse": 16791, - "step": 16792, - "à®ļ": 16793, - "cup": 16794, - "Ġpops": 16795, - "rimin": 16796, - "ayım": 16797, - "Ġald": 16798, - "Ġunnecess": 16799, - "Ġdah": 16800, - "ĠIrish": 16801, - "Ġcompr": 16802, - "laÅŁ": 16803, - "TP": 16804, - "Ġtranslated": 16805, - "Sc": 16806, - "ceÄŁim": 16807, - "´IJ": 16808, - "Ġdrei": 16809, - "ĠлÑİдей": 16810, - "Ġquiero": 16811, - "Ġhele": 16812, - "zlich": 
16813, - "Ġapples": 16814, - "Ġdistricts": 16815, - "Ġcredits": 16816, - "Ġasp": 16817, - "Ġëĭ¨": 16818, - "oral": 16819, - "å½±": 16820, - "Ġstepping": 16821, - "ĠVa": 16822, - "Ġgains": 16823, - "65": 16824, - "Ġnuestra": 16825, - "eday": 16826, - "assador": 16827, - "ĠLind": 16828, - "Ġcrops": 16829, - "ciendo": 16830, - "igue": 16831, - "Ġbana": 16832, - "Am": 16833, - "Ġpent": 16834, - "Ġaddiction": 16835, - "Ġpackaging": 16836, - "äd": 16837, - "ª¨": 16838, - "Ġperquè": 16839, - "Ġcampaigns": 16840, - "Ġsteep": 16841, - "Ġneue": 16842, - "Ġembarrassed": 16843, - "Ġdistinction": 16844, - "itzer": 16845, - "åijĬ": 16846, - "Ġregistration": 16847, - "Ġllam": 16848, - "ĠAlmighty": 16849, - "liest": 16850, - "Ġuz": 16851, - "nak": 16852, - "çº": 16853, - "Ġteraz": 16854, - "iamente": 16855, - "Ġtransactions": 16856, - "Ġcôt": 16857, - "Ġswitched": 16858, - "Ġcombo": 16859, - "Ġprayers": 16860, - "Ġinternship": 16861, - "Ġaddresses": 16862, - "Ġcharity": 16863, - "ĠWOO": 16864, - "Ġbait": 16865, - "è¿ĩ": 16866, - "Ġ�": 16867, - "Ġfica": 16868, - "ĠTyler": 16869, - "aru": 16870, - "Ġatoms": 16871, - "ĠLevel": 16872, - "ĠпоÑĤом": 16873, - "Ġfame": 16874, - "ulk": 16875, - "Ġteaches": 16876, - "Ġrebuild": 16877, - "едÑĮ": 16878, - "ĠIndonesia": 16879, - "ushi": 16880, - "ĠShort": 16881, - "Ġensuring": 16882, - "fs": 16883, - "ele": 16884, - "Ġmarginal": 16885, - "Ġconclude": 16886, - "amt": 16887, - "Ġverify": 16888, - "ĠMcDonald": 16889, - "Ġskal": 16890, - "Ġreconst": 16891, - "ĠMann": 16892, - "Ġbasement": 16893, - "Ġtransformed": 16894, - "Ġoccasionally": 16895, - "zone": 16896, - "ĠDans": 16897, - "Ġкакой": 16898, - "Ġdiagnosed": 16899, - "ĠÏĦα": 16900, - "Ġcommands": 16901, - "Ġpresidential": 16902, - "Ġabb": 16903, - "Ġbracket": 16904, - "ĠLem": 16905, - "Ã¥ng": 16906, - "Ġfavorites": 16907, - "Ġrevol": 16908, - "ĠíĬ¹": 16909, - "Ġharass": 16910, - "éħ": 16911, - "Ġcleans": 16912, - "ständ": 16913, - "Ġknocked": 16914, - "Ġpeoples": 16915, - "Ġmusicians": 16916, - "Ġmutual": 16917, - "ĠCold": 16918, - "88": 16919, - "zej": 16920, - "atie": 16921, - "ĠHonor": 16922, - "Ġobsessed": 16923, - "ĠMUSIC": 16924, - "ĠBreak": 16925, - "úng": 16926, - "Ġmodify": 16927, - "Ġsöyle": 16928, - "Ġ×ŀ×Ķ": 16929, - "ĠOnline": 16930, - "fo": 16931, - "ĠMiller": 16932, - "Ġliking": 16933, - "Ġinhab": 16934, - "Ġgratitude": 16935, - "ĠJournal": 16936, - "arness": 16937, - "John": 16938, - "ĠGit": 16939, - "åīĽ": 16940, - "Ġsincere": 16941, - "ĠSci": 16942, - "ĠEli": 16943, - "Ġsymbols": 16944, - "Ġmanually": 16945, - "εÏĤ": 16946, - "ĠвÑĸд": 16947, - "ĠFat": 16948, - "Ġlabels": 16949, - "Ġsophisticated": 16950, - "umps": 16951, - "Ġreleases": 16952, - "Ġ47": 16953, - "ĠOM": 16954, - "ê°Ģë": 16955, - "ĠBien": 16956, - "ĠRef": 16957, - "è¨ĺ": 16958, - "ĠSta": 16959, - "ĠEgg": 16960, - "Ġindicator": 16961, - "pson": 16962, - "Ġnasıl": 16963, - "Right": 16964, - "Ġconvey": 16965, - "Ġknot": 16966, - "Ġconnects": 16967, - "ulas": 16968, - "Ġpreced": 16969, - "Ġinequality": 16970, - "amiento": 16971, - "Ġreply": 16972, - "OY": 16973, - "Ġdismiss": 16974, - "ĠëIJľ": 16975, - "çĦ¡": 16976, - "ĠÑħоÑĢоÑĪо": 16977, - "Ġméd": 16978, - "Ġrandomly": 16979, - "ĠOnt": 16980, - "uard": 16981, - "Ġpulls": 16982, - "ĠÑĤепеÑĢÑĮ": 16983, - "ĠNeed": 16984, - "ĠSoft": 16985, - "Ġstrengths": 16986, - "Ġgoed": 16987, - "umen": 16988, - "æŃ»": 16989, - "Ġíݸ": 16990, - "Ġдоб": 16991, - "Ġclarity": 16992, - "ĠAi": 16993, - "Ġballoon": 16994, - "ĠPand": 16995, - "ĠìķĦëĭ": 16996, - "Ġshiny": 16997, - "Ġsmallest": 16998, - "onia": 
16999, - "hill": 17000, - "oting": 17001, - "Ġeing": 17002, - "Ġmerely": 17003, - "Ġseus": 17004, - "Ġнеп": 17005, - "ĠíĨµ": 17006, - "Ġguides": 17007, - "Ġspecialist": 17008, - "Ġsteak": 17009, - "ãĤĪãģĨ": 17010, - "Ġmigration": 17011, - "quele": 17012, - "Ġruined": 17013, - "Ġpupp": 17014, - "女": 17015, - "Ġkend": 17016, - "angan": 17017, - "Ġpalm": 17018, - "Ġunfair": 17019, - "Ġzm": 17020, - "ĠDV": 17021, - "chester": 17022, - "иÑİ": 17023, - "Ġooh": 17024, - "erg": 17025, - "ATH": 17026, - "°©": 17027, - "åĵª": 17028, - "rison": 17029, - "Ġinvolving": 17030, - "Ġpartly": 17031, - "ançais": 17032, - "Ġvow": 17033, - "Ġprominent": 17034, - "Ġcryst": 17035, - "iba": 17036, - "Ġdeserves": 17037, - "Ġovert": 17038, - "Ġsensit": 17039, - "ĠWhe": 17040, - "Ġtighten": 17041, - "Ġintimid": 17042, - "Ġaliment": 17043, - "will": 17044, - "Ġstrengthen": 17045, - "ĠTan": 17046, - "åıĪ": 17047, - "ãģĹãģ¾ãģĻ": 17048, - "oni": 17049, - "ĠMun": 17050, - "Ġproph": 17051, - "Ġrehears": 17052, - "ĠKle": 17053, - "Ġveces": 17054, - "Ġwondered": 17055, - "oki": 17056, - "Ġsenses": 17057, - "´ìĭ": 17058, - "Æ°á»Ľ": 17059, - "ĠÈĻi": 17060, - "Ġmuchos": 17061, - "Ġwatches": 17062, - "ortunate": 17063, - "ĠJuan": 17064, - "ìŀĸìķĦ": 17065, - "ÑĢе": 17066, - "ei": 17067, - "ionen": 17068, - "Ġexperimental": 17069, - "Ġdaughters": 17070, - "à¸Ľ": 17071, - "Ġmentally": 17072, - "becca": 17073, - "aware": 17074, - "ìĦĿ": 17075, - "Ġwhatsoever": 17076, - "Ġenables": 17077, - "ĠLow": 17078, - "oid": 17079, - "à¸Ĭ": 17080, - "ód": 17081, - "غ": 17082, - "Ġconstructed": 17083, - "ĠLadies": 17084, - "Ġaccused": 17085, - "Ġан": 17086, - "Dan": 17087, - "Ġspawn": 17088, - "Ġcontainers": 17089, - "Ġartistic": 17090, - "ıp": 17091, - "Ġdiscl": 17092, - "Ġautres": 17093, - "inas": 17094, - "ĠNation": 17095, - "Ġnag": 17096, - "bean": 17097, - "whe": 17098, - "ľëıĦ": 17099, - "ĠSeoul": 17100, - "Ġíı¬": 17101, - "ĠNich": 17102, - "Ġcomplement": 17103, - "Ġinterven": 17104, - "ĠModel": 17105, - "ĠOrange": 17106, - "namon": 17107, - "Ġcalculation": 17108, - "see": 17109, - "Ġustedes": 17110, - "Ġleb": 17111, - "Ġdoct": 17112, - "Ñĸн": 17113, - "Ġfoster": 17114, - "Ġelastic": 17115, - "ĠAhh": 17116, - "Ġace": 17117, - "ĠPink": 17118, - "ĠJeg": 17119, - "Ġdeer": 17120, - "ãģĹãģĦ": 17121, - "sis": 17122, - "Ġjako": 17123, - "ĠEmma": 17124, - "ÑģÑĤвенно": 17125, - "Ġportrait": 17126, - "Ġmaker": 17127, - "Ġaument": 17128, - "ÑĢоб": 17129, - "Ġairplane": 17130, - "Ġtransparency": 17131, - "Ġadjustment": 17132, - "ĠCDC": 17133, - "çon": 17134, - "Ġuploaded": 17135, - "ĠдейÑģÑĤв": 17136, - "ĠгоÑĤов": 17137, - "Ġiter": 17138, - "Ġcurse": 17139, - "ôn": 17140, - "merce": 17141, - "aran": 17142, - "Ġleak": 17143, - "çµIJ": 17144, - "Ġabsence": 17145, - "Ñģкий": 17146, - "Ġreaders": 17147, - "aler": 17148, - "Ġbeneath": 17149, - "ango": 17150, - "hetic": 17151, - "Ġfinns": 17152, - "Ġpoop": 17153, - "Ġduplic": 17154, - "Hi": 17155, - "igs": 17156, - "ologically": 17157, - "opp": 17158, - "Ġdizer": 17159, - "ĠAllen": 17160, - "Ġgli": 17161, - "Ġacceleration": 17162, - "Ġvitamin": 17163, - "ãĥŃ": 17164, - "vä": 17165, - "ĠAccess": 17166, - "à®Ļ": 17167, - "rás": 17168, - "Ġappreciated": 17169, - "Ġnah": 17170, - "Ġposter": 17171, - "Ġtale": 17172, - "Ġhighlighted": 17173, - "æĸĩ": 17174, - "żeli": 17175, - "Ġblockchain": 17176, - "Ġmicrow": 17177, - "Ġcinema": 17178, - "ĠChang": 17179, - "ĠSearch": 17180, - "usters": 17181, - "ĠZero": 17182, - "ĠDivision": 17183, - "ÑĢаÑģ": 17184, - "Ġscare": 17185, - "Ġjelly": 17186, - "ĠAdministration": 
17187, - "SO": 17188, - "Ġlined": 17189, - "Ġê°Ħ": 17190, - "Ġgeben": 17191, - "Ġsoda": 17192, - "Ġwinners": 17193, - "³¼": 17194, - "ÙĴ": 17195, - "ĠAmb": 17196, - "åķıé¡Į": 17197, - "åĶ": 17198, - "Ġpeg": 17199, - "å·±": 17200, - "43": 17201, - "Ġraus": 17202, - "Ġrewards": 17203, - "Ġinclus": 17204, - "Ġhighway": 17205, - "Ġhah": 17206, - "Ġmultiplied": 17207, - "Ġsẽ": 17208, - "Ġdisciples": 17209, - "Ġning": 17210, - "Ġdressing": 17211, - "Ġattributes": 17212, - "ĠMosc": 17213, - "ĠGreece": 17214, - "Ġsek": 17215, - "ĠLearn": 17216, - "Ġjus": 17217, - "rendre": 17218, - "Ġpersonne": 17219, - "plete": 17220, - "Ġplacing": 17221, - "Ġluego": 17222, - "illance": 17223, - "ĠобÑī": 17224, - "Ġprovision": 17225, - "Ġlion": 17226, - "tra": 17227, - "boards": 17228, - "Ġbehaviour": 17229, - "hey": 17230, - "Ġsubscription": 17231, - "Ġprotagon": 17232, - "ãĥ£": 17233, - "Ġvara": 17234, - "ĠÅŁu": 17235, - "Ġhaha": 17236, - "Ġteaspoon": 17237, - "æŁ": 17238, - "avoir": 17239, - "Ġcrypto": 17240, - "ĠÑģÑĤаÑĢ": 17241, - "ĠStore": 17242, - "abs": 17243, - "ĠStudents": 17244, - "Ġlaund": 17245, - "into": 17246, - "Ġapproached": 17247, - "°ľ": 17248, - "ÑĥÑİÑī": 17249, - "ĠLabor": 17250, - "otes": 17251, - "iatric": 17252, - "ĠgroÃŁ": 17253, - "utive": 17254, - "Ġид": 17255, - "ĠGib": 17256, - "Ġplacement": 17257, - "ĠdifÃŃcil": 17258, - "Ġfrog": 17259, - "ĠвÑģеÑħ": 17260, - "ĠJr": 17261, - "azed": 17262, - "ÑĥÑī": 17263, - "Ġê¼": 17264, - "frame": 17265, - "аеÑĪÑĮ": 17266, - "Ġlockdown": 17267, - "åij³": 17268, - "Ġmedi": 17269, - "Ġ×Ķ×ŀ×": 17270, - "ений": 17271, - "emale": 17272, - "ì¢ħ": 17273, - "ateral": 17274, - "Ġdistant": 17275, - "Ġbears": 17276, - "Ġjournalist": 17277, - "解": 17278, - "ĠMarshall": 17279, - "ĠIhnen": 17280, - "uetooth": 17281, - "bag": 17282, - "ĠÄijã": 17283, - "ĠHighness": 17284, - "Ġì°į": 17285, - "ика": 17286, - "ĠWu": 17287, - "ĠFran": 17288, - "Ġpeng": 17289, - "Ġfon": 17290, - "Ġhypothesis": 17291, - "ĠÑĢÑĥ": 17292, - "Ġly": 17293, - "×ļ": 17294, - "ìĽĶ": 17295, - "ĠRadio": 17296, - "à¸ŀ": 17297, - "Dav": 17298, - "Ġembarrassing": 17299, - "ĠìŀĪìĸ´": 17300, - "Ġcasting": 17301, - "Ġcage": 17302, - "ĠPsych": 17303, - "ĠìĿ¼ëĭ¨": 17304, - "Ġž": 17305, - "imb": 17306, - "Ġdirectors": 17307, - "SH": 17308, - "ĠÏĦην": 17309, - "á»ģu": 17310, - "ĠkonuÅŁ": 17311, - "Ġoptional": 17312, - "quarters": 17313, - "iker": 17314, - "ĠSant": 17315, - "Ġverses": 17316, - "ë¶Ģ": 17317, - "Ġolar": 17318, - "ĠÏĩ": 17319, - "ãĥķ": 17320, - "Ġγια": 17321, - "ĠImm": 17322, - "Ġcontroversial": 17323, - "Ġersten": 17324, - "Ġrecip": 17325, - "ĠChristianity": 17326, - "Ġê´ľ": 17327, - "ordon": 17328, - "×ķש": 17329, - "Ġslash": 17330, - "ĠPf": 17331, - "ÑĥдÑĮ": 17332, - "×ķ×Ŀ": 17333, - "ĠPerry": 17334, - "Ġmamy": 17335, - "Ġbackgrounds": 17336, - "Ġà®İன": 17337, - "Ġpendant": 17338, - "ĠColumbia": 17339, - "Ġinverse": 17340, - "ĠÑĩеÑĢез": 17341, - "Ġsv": 17342, - "Ġdigging": 17343, - "41": 17344, - "chem": 17345, - "Ġnavigation": 17346, - "ĠShin": 17347, - "ĠFront": 17348, - "PD": 17349, - "Ġbearing": 17350, - "ĠWasser": 17351, - "Ġwax": 17352, - "ĠCHRIS": 17353, - "ching": 17354, - "Ġpressed": 17355, - "El": 17356, - "ĠDal": 17357, - "onsin": 17358, - "Ġbinding": 17359, - "Ñģкой": 17360, - "poons": 17361, - "Ġmock": 17362, - "arest": 17363, - "кÑĢа": 17364, - "MM": 17365, - "Ġcorrupt": 17366, - "storm": 17367, - "Ġrefres": 17368, - "ĠCoach": 17369, - "llä": 17370, - "ĠTHIS": 17371, - "Ġparag": 17372, - "Ġìĵ°": 17373, - "pool": 17374, - "Ġbillions": 17375, - "Ġê¹Ģ": 17376, - "group": 17377, - 
"Ġwelcoming": 17378, - "cellence": 17379, - "ĠDuke": 17380, - "긴": 17381, - "Ġprimera": 17382, - "ìł¸": 17383, - "Ġpond": 17384, - "Ġstatue": 17385, - "Ġ구ë": 17386, - "Ġhatch": 17387, - "Ġinstrumental": 17388, - "Ġresidential": 17389, - "커": 17390, - "Ġaccepting": 17391, - "oshi": 17392, - "date": 17393, - "ĠìĶ¨": 17394, - "Ġplanted": 17395, - "Ġjoking": 17396, - "ĠìĦľ": 17397, - "Ġhated": 17398, - "ĠÑĢаÑģÑģк": 17399, - "Ġslept": 17400, - "Ġpackages": 17401, - "Ġislands": 17402, - "esen": 17403, - "ģı": 17404, - "Ġdiagon": 17405, - "ĠOsc": 17406, - "Ġmesh": 17407, - "Ġscales": 17408, - "arity": 17409, - "ĠDefense": 17410, - "ãģ¡ãĤĩ": 17411, - "ĠLewis": 17412, - "ĠÑģегоднÑı": 17413, - "Ġflies": 17414, - "uinely": 17415, - "ĠConsider": 17416, - "Ġstark": 17417, - "hew": 17418, - "ĠAsÃŃ": 17419, - "³´ë": 17420, - "Ġpropose": 17421, - "Ġíķĺë©´": 17422, - "odo": 17423, - "ĠNormally": 17424, - "Ġheeft": 17425, - "ĠHarris": 17426, - "gro": 17427, - "ĠBlood": 17428, - "base": 17429, - "ĠiOS": 17430, - "Ġtouches": 17431, - "Ġinspir": 17432, - "Ġ×ĵ": 17433, - "Ġbinary": 17434, - "Ġì¶Ķ": 17435, - "Ġserial": 17436, - "Ġion": 17437, - "Ġunemployment": 17438, - "Ġodds": 17439, - "ĠFab": 17440, - "ĠFBI": 17441, - "BRUN": 17442, - "Ġweights": 17443, - "νο": 17444, - "atile": 17445, - "Ġnurses": 17446, - "Ġinvolvement": 17447, - "ĠíĶ¼": 17448, - "Ġgovernance": 17449, - "ĠâĤ¬": 17450, - "ÑĢÑĥп": 17451, - "ierra": 17452, - "íĺķ": 17453, - "ĠJerry": 17454, - "Ġbeard": 17455, - "Ġsalvation": 17456, - "ĠAlong": 17457, - "gentle": 17458, - "ĠKi": 17459, - "bol": 17460, - "ĠPlat": 17461, - "Ġhasht": 17462, - "è¿ij": 17463, - "Ġware": 17464, - "Ġpartie": 17465, - "ycz": 17466, - "Ġintr": 17467, - "Fih": 17468, - "nent": 17469, - "Ġcheat": 17470, - "ilen": 17471, - "Ġë¯": 17472, - "orie": 17473, - "Ġfácil": 17474, - "etric": 17475, - "Ġaffecting": 17476, - "unciation": 17477, - "Ġaffairs": 17478, - "Ġbee": 17479, - "Ġviewing": 17480, - "Ġorang": 17481, - "ĠLan": 17482, - "ĠСÑĤ": 17483, - "ä¸ĸ": 17484, - "ĠMes": 17485, - "ĥģ": 17486, - "erie": 17487, - "Ġespa": 17488, - "Ġinterpre": 17489, - "Ġpossess": 17490, - "Ġpurely": 17491, - "rito": 17492, - "found": 17493, - "asma": 17494, - "ìłģìĿ¸": 17495, - "Ġexamine": 17496, - "ĠÑĥм": 17497, - "Ġbesch": 17498, - "ĠTomorrow": 17499, - "ĠBlock": 17500, - "Ġvariant": 17501, - "Ġpreference": 17502, - "Ġcoaches": 17503, - "Ġmedications": 17504, - "ĠíĺĦ": 17505, - "Ġempire": 17506, - "ëĦ¤": 17507, - "ĠIllinois": 17508, - "Ġcrispy": 17509, - "Ġthì": 17510, - "Ġbees": 17511, - "77": 17512, - "Ġglow": 17513, - "èº": 17514, - "ĠStudies": 17515, - "åIJĦ": 17516, - "ĠChallenge": 17517, - "Ġunlikely": 17518, - "Ч": 17519, - "ıyorsun": 17520, - "DIE": 17521, - "Ġminimize": 17522, - "izard": 17523, - "Ġún": 17524, - "Ġencontrar": 17525, - "ĠKill": 17526, - "å»": 17527, - "Ġvanilla": 17528, - "ĠGrant": 17529, - "ĠGT": 17530, - "sea": 17531, - "Ġsought": 17532, - "вод": 17533, - "Ġnäm": 17534, - "ĠAunt": 17535, - "OWN": 17536, - "Ġpumpkin": 17537, - "stellen": 17538, - "Ġrag": 17539, - "егда": 17540, - "Ġstoryt": 17541, - "Ġforum": 17542, - "æ©Ł": 17543, - "Ġestaba": 17544, - "uche": 17545, - "Ġcongress": 17546, - "ĠRey": 17547, - "Ġdramatically": 17548, - "ĠSport": 17549, - "ĠYellow": 17550, - "Ġê³ĦìĨį": 17551, - "Ġdisgusting": 17552, - "ĠRecent": 17553, - "Ġacquired": 17554, - "Ġcables": 17555, - "çĶļ": 17556, - "din": 17557, - "Ġvisto": 17558, - "Ġcommunicating": 17559, - "ÑģÑĤавлÑı": 17560, - "еÑģÑĤо": 17561, - "ãĥ»ãĥ»ãĥ»": 17562, - "Ġrég": 17563, - "Ġsocks": 17564, - "Ġproces": 
17565, - "because": 17566, - "Ġutter": 17567, - "Ġcolocar": 17568, - "Ġnewest": 17569, - "Ġgramm": 17570, - "表": 17571, - "ä¸įçŁ¥éģĵ": 17572, - "Ġshifting": 17573, - "Ġcarrier": 17574, - "ĠÑģкоÑĢ": 17575, - "ĠSchw": 17576, - "Ġexecuted": 17577, - "Ġmaintained": 17578, - "ĠÏĨ": 17579, - "ĠMoses": 17580, - "Ġdisse": 17581, - "Ġhorr": 17582, - "ãĢľ": 17583, - "Ġrally": 17584, - "Ġallem": 17585, - "ĠEventually": 17586, - "Ġdiyor": 17587, - "lvania": 17588, - "Ġschnell": 17589, - "Ġê³¼": 17590, - "Ġ매": 17591, - "Ġstruggles": 17592, - "late": 17593, - "Ġclarify": 17594, - "ément": 17595, - "Ġmultiplic": 17596, - "ибо": 17597, - "Ġjourn": 17598, - "Ġfragr": 17599, - "Ġsurprisingly": 17600, - "Ġdesperate": 17601, - "52": 17602, - "Ġsul": 17603, - "ĠRead": 17604, - "ĠFried": 17605, - "Ġmond": 17606, - "woo": 17607, - "Ġorganizing": 17608, - "ãģĹãĤĩãģĨ": 17609, - "ĠSoon": 17610, - "ĠвопÑĢоÑģ": 17611, - "ĠNur": 17612, - "ĠÐĹд": 17613, - "Ġspider": 17614, - "еÑģÑı": 17615, - "Ġtutorials": 17616, - "Ġnutrients": 17617, - "orer": 17618, - "Ġcoefficient": 17619, - "Ġarrangement": 17620, - "Ġpricing": 17621, - "nan": 17622, - "yu": 17623, - "BL": 17624, - "Ġtribe": 17625, - "ĠHoward": 17626, - "unks": 17627, - "Ġnewer": 17628, - "Ġprovin": 17629, - "Ġprediction": 17630, - "hos": 17631, - "Ġolsun": 17632, - "ĠAround": 17633, - "Ġvier": 17634, - "ĠÑģÑĤоÑĢон": 17635, - "Ġvalley": 17636, - "ĠEla": 17637, - "ifi": 17638, - "Ġgalaxy": 17639, - "Ġtranqu": 17640, - "Ġadvers": 17641, - "ĠTemple": 17642, - "iffs": 17643, - "igence": 17644, - "èĩªå·±": 17645, - "Ġkönnte": 17646, - "ĠÄijó": 17647, - "Did": 17648, - "Ġphotographs": 17649, - "ĠAWS": 17650, - "ÑĨиÑı": 17651, - "Ġguards": 17652, - "Ġappointed": 17653, - "ĠGil": 17654, - "Ġмом": 17655, - "Ġcod": 17656, - "ĠUnlike": 17657, - "Ġevenly": 17658, - "isconsin": 17659, - "Ġestou": 17660, - "Ġmnie": 17661, - "ĠExec": 17662, - "ĠMV": 17663, - "ĠEine": 17664, - "ä¿¡": 17665, - "ĠRoger": 17666, - "ĠFac": 17667, - "ĠList": 17668, - "Ġfuer": 17669, - "аеÑĤе": 17670, - "omed": 17671, - "Ġattraction": 17672, - "èī²": 17673, - "Ġterrain": 17674, - "ĠDrop": 17675, - "Ġcorporations": 17676, - "Ġsciences": 17677, - "Ġthrone": 17678, - "ãģĦãģŁ": 17679, - "Ġaj": 17680, - "ĠRot": 17681, - "çī¹": 17682, - "Ġsupporters": 17683, - "ĠBere": 17684, - "Here": 17685, - "Ġdiferentes": 17686, - "Ġsignificance": 17687, - "Ïĥη": 17688, - "æĪij覺å¾Ĺ": 17689, - "Ġclamp": 17690, - "ĠëĮĢë": 17691, - "Ġfabulous": 17692, - "rez": 17693, - "æĮģ": 17694, - "Ġassumptions": 17695, - "uther": 17696, - "wid": 17697, - "pot": 17698, - "è¿İ": 17699, - "Ġyan": 17700, - "ulin": 17701, - "ÑĢÑĭв": 17702, - "ĠSlow": 17703, - "ĠPennsy": 17704, - "Ġíķ´ìĦľ": 17705, - "Ġmeio": 17706, - "Ġwealthy": 17707, - "ĠEight": 17708, - "Ġpulse": 17709, - "Ġfriction": 17710, - "idity": 17711, - "ĠHoll": 17712, - "iyorum": 17713, - "Ġsounded": 17714, - "ĠCarr": 17715, - "Ġfork": 17716, - "âĺ": 17717, - "ĠPA": 17718, - "Ġconspir": 17719, - "Ġcoding": 17720, - "rt": 17721, - "ĠTyp": 17722, - "Ġìĸij": 17723, - "Ġпог": 17724, - "Ġmiser": 17725, - "ĠÑģмоÑĤÑĢ": 17726, - "ĠSweden": 17727, - "Ġolarak": 17728, - "ĠZhang": 17729, - "ĠChi": 17730, - "ĠTitan": 17731, - "Ġscreening": 17732, - "ĠSpider": 17733, - "ĠÅŀimdi": 17734, - "Ġobstacles": 17735, - "lara": 17736, - "Ġchallenged": 17737, - "pse": 17738, - "TON": 17739, - "ụ": 17740, - "ĠPi": 17741, - "Ġlagi": 17742, - "ieurs": 17743, - "Ġhurting": 17744, - "Ġneglect": 17745, - "Ġgenerating": 17746, - "Ġyoungest": 17747, - "Ġaudit": 17748, - "ĠÑĢез": 17749, - "Ïģά": 17750, - 
"Ġdonate": 17751, - "ĠPDF": 17752, - "Ġvisits": 17753, - "Ġcruise": 17754, - "PP": 17755, - "aser": 17756, - "Ġwsp": 17757, - "backs": 17758, - "ivals": 17759, - "ãģĨãĤĵ": 17760, - "Ġdeve": 17761, - "Ġproport": 17762, - "Ġcath": 17763, - "ĠEffect": 17764, - "Ġwinds": 17765, - "ĠìĻĶ": 17766, - "Ġcharts": 17767, - "Ġsama": 17768, - "Ġautomation": 17769, - "Ġпока": 17770, - "Ġolan": 17771, - "Ġboats": 17772, - "Ġcafe": 17773, - "Ġdenied": 17774, - "ĠMama": 17775, - "Ġblocking": 17776, - "ĠThor": 17777, - "Ġphenomenal": 17778, - "Ġstakeholders": 17779, - "Ġunos": 17780, - "ÑĥеÑĤ": 17781, - "ĠAbraham": 17782, - "ãģ§ãĤĤ": 17783, - "Ġdetection": 17784, - "Ġjuris": 17785, - "Ġpowered": 17786, - "zial": 17787, - "Ġwelfare": 17788, - "Ġupgrad": 17789, - "Ġmożna": 17790, - "ĠCase": 17791, - "cular": 17792, - "ĶìĿ´": 17793, - "ãĥģ": 17794, - "ĠGuess": 17795, - "Ġcycles": 17796, - "ä¾ĭ": 17797, - "給": 17798, - "rock": 17799, - "umi": 17800, - "Ġelite": 17801, - "Ġquè": 17802, - "åł±": 17803, - "ÑĤом": 17804, - "Ġshore": 17805, - "gunta": 17806, - "Ġku": 17807, - "Ġfaithful": 17808, - "ĠJeremy": 17809, - "aid": 17810, - "à·": 17811, - "ugal": 17812, - "å°įåķĬ": 17813, - "ĠVel": 17814, - "Ġvrai": 17815, - "stell": 17816, - "¨¸": 17817, - "Ġkol": 17818, - "è½": 17819, - "Ġquanto": 17820, - "ĠзаÑĢ": 17821, - "Ġ2002": 17822, - "esy": 17823, - "Ġreserve": 17824, - "ĠмоменÑĤ": 17825, - "Ġdeployed": 17826, - "Ġdefining": 17827, - "Ġsau": 17828, - "Ġgaat": 17829, - "\")": 17830, - "Ġtransmit": 17831, - "Ġpublishing": 17832, - "Ġranking": 17833, - "Ġoffense": 17834, - "Ġ46": 17835, - "pin": 17836, - "ĠTaking": 17837, - "Ġentitled": 17838, - "Ġgenuinely": 17839, - "Ġvariations": 17840, - "Ġfinde": 17841, - "Ġtau": 17842, - "Ġunfortunate": 17843, - "ĠRah": 17844, - "ports": 17845, - "ĠcÅ": 17846, - "Ġmonkey": 17847, - "Ġbrac": 17848, - "wei": 17849, - "lung": 17850, - "Ġartif": 17851, - "Ġsyrup": 17852, - "ĠÐĶав": 17853, - "Ġlifted": 17854, - "Ġchez": 17855, - "ĠAdvent": 17856, - "ĠStock": 17857, - "Ġdol": 17858, - "мен": 17859, - "иÑĪÑĮ": 17860, - "Ġyn": 17861, - "gio": 17862, - "det": 17863, - "Ġdesse": 17864, - "Ġgri": 17865, - "ĠChairman": 17866, - "çħ": 17867, - "Ġcuenta": 17868, - "anim": 17869, - "Ġcrab": 17870, - "Ġescal": 17871, - "Ġpremière": 17872, - "ĠGef": 17873, - "Ġdining": 17874, - "Ġseventh": 17875, - "Ġchasing": 17876, - "ĠTower": 17877, - "Ġbrutal": 17878, - "Ġfundamentally": 17879, - "ãģ¨ãģĨ": 17880, - "лениÑı": 17881, - "stage": 17882, - "Ġacquis": 17883, - "Ġcylinder": 17884, - "Ġcommander": 17885, - "mem": 17886, - "ĠUV": 17887, - "happy": 17888, - "Ġepsilon": 17889, - "Ġinvitation": 17890, - "Ġfarmer": 17891, - "chair": 17892, - "Ġdestiny": 17893, - "Ġsovere": 17894, - "ĠHebrew": 17895, - "Ġservant": 17896, - "Ġbew": 17897, - "Ġgast": 17898, - "uties": 17899, - "Ġadministrative": 17900, - "ĠCommand": 17901, - "éta": 17902, - "Ġnitrogen": 17903, - "ê·¼": 17904, - "Ġabi": 17905, - "Ġvillain": 17906, - "Ġblanket": 17907, - "ĠSend": 17908, - "Ġbeaten": 17909, - "²Ħ": 17910, - "Ġvolunt": 17911, - "Ġscholar": 17912, - "ĠEmperor": 17913, - "Ġ43": 17914, - "vable": 17915, - "ĠDus": 17916, - "ĠGU": 17917, - "Ġtargeting": 17918, - "www": 17919, - "Ġamendment": 17920, - "ìĨĮë": 17921, - "Ġting": 17922, - "Ġnasty": 17923, - "Ġgauge": 17924, - "ĠÑĢод": 17925, - "ĠHans": 17926, - "Your": 17927, - "αν": 17928, - "Ġprojet": 17929, - "ĠHawaii": 17930, - "Ġsuspicious": 17931, - "Ġschw": 17932, - "Ġremoval": 17933, - "Ġintrig": 17934, - "ĠMU": 17935, - "Ġponto": 17936, - "ा": 17937, - "ĠобÑĢаз": 17938, - 
"Ġguessing": 17939, - "pace": 17940, - "Ġmothers": 17941, - "Ġmillimeter": 17942, - "ление": 17943, - "没æľī": 17944, - "Ġavailability": 17945, - "icz": 17946, - "æѤ": 17947, - "Ġfract": 17948, - "Ġbases": 17949, - "km": 17950, - "ĠBTS": 17951, - "ĠField": 17952, - "Ġdzie": 17953, - "Ġsegundo": 17954, - "ĠëĤĺëĬĶ": 17955, - "Ġlegitimate": 17956, - "imas": 17957, - "Ġвн": 17958, - "Ġcorruption": 17959, - "Ġsmash": 17960, - "ĠValent": 17961, - "Ġaligned": 17962, - "ĠPennsylvania": 17963, - "Ġgab": 17964, - "ĠEun": 17965, - "enth": 17966, - "ĠMorning": 17967, - "Ġcandle": 17968, - "Ġbackpack": 17969, - "ĠIslamic": 17970, - "ações": 17971, - "Ġencry": 17972, - "Ġmushrooms": 17973, - "íĮĮ": 17974, - "dit": 17975, - "Ġtransit": 17976, - "ĠWisconsin": 17977, - "Ġparticipated": 17978, - "ĠIls": 17979, - "Ġunfold": 17980, - "¶Ģë": 17981, - "Ġprofits": 17982, - "Ġwarming": 17983, - "ĠGang": 17984, - "Ġnetworking": 17985, - "Ġmega": 17986, - "Ġthoroughly": 17987, - "lements": 17988, - "ĠHm": 17989, - "Ġdeciding": 17990, - "Ġemotionally": 17991, - "Ġexhausted": 17992, - "ĠÐŁÐ¾ÑĤ": 17993, - "cido": 17994, - "ĠHTML": 17995, - "Ġcopyright": 17996, - "Ġmelody": 17997, - "yim": 17998, - "Ġanders": 17999, - "oshop": 18000, - "Ġë³¼": 18001, - "Ġathlete": 18002, - "ĠGE": 18003, - "Ġfrequent": 18004, - "Ġdesires": 18005, - "Ġneeding": 18006, - "ĠYun": 18007, - "Ġrifle": 18008, - "Ġlover": 18009, - "'T": 18010, - "Ġdense": 18011, - "Ġtão": 18012, - "Ġnotified": 18013, - "Ġidi": 18014, - "ìĹŃ": 18015, - "íĨ": 18016, - "Ġinteracting": 18017, - "Ġrapport": 18018, - "еÑĢи": 18019, - "ski": 18020, - "Ġbesser": 18021, - "Ġmanufacturer": 18022, - "ĠKyle": 18023, - "Ġaccountable": 18024, - "ĠSak": 18025, - "ĠPil": 18026, - "ĠDomin": 18027, - "Ġpresum": 18028, - "ĠÐĴÑģе": 18029, - "Ġvinegar": 18030, - "Ġguaranteed": 18031, - "çľĭåĪ°": 18032, - "Ġhandled": 18033, - "éŁ³": 18034, - "cat": 18035, - "Ġcivilization": 18036, - "Ġaccomp": 18037, - "ĠVM": 18038, - "émon": 18039, - "Ġdeze": 18040, - "Ġgrades": 18041, - "Ġsollte": 18042, - "Ġstaring": 18043, - "×IJת": 18044, - "arnt": 18045, - "Ġhorizon": 18046, - "Ġtravail": 18047, - "hour": 18048, - "第ä¸Ģ": 18049, - "ĠED": 18050, - "ĠDak": 18051, - "Ġny": 18052, - "Ġconve": 18053, - "ĠCham": 18054, - "Ġfirms": 18055, - "ĠLiu": 18056, - "ĠÑģÑĤÑĢан": 18057, - "Ġlibert": 18058, - "Ġlenses": 18059, - "Ġintake": 18060, - "ĠвÑĭб": 18061, - "Ġmensen": 18062, - "hel": 18063, - "Ġpractition": 18064, - "Ġ350": 18065, - "ãĤ³": 18066, - "FO": 18067, - "Ġbeds": 18068, - "Ġancestors": 18069, - "ĠìĹĦì²Ń": 18070, - "Ġdisturb": 18071, - "ĠLastly": 18072, - "ĠSupport": 18073, - "ีà¹ī": 18074, - "ĠCorona": 18075, - "Ġenthusi": 18076, - "Ġвозм": 18077, - "ĠìĤ¬ëŀĮë": 18078, - "Ġ52": 18079, - "bird": 18080, - "Ġreduces": 18081, - "ĠìŀĪìĿĦ": 18082, - "ĠGene": 18083, - "êµIJ": 18084, - "ÄĻp": 18085, - "ĠÃľber": 18086, - "Ġconcerning": 18087, - "user": 18088, - "Ġconcentrate": 18089, - "ĠWHAT": 18090, - "ishop": 18091, - "onymous": 18092, - "nold": 18093, - "Ġsuggesting": 18094, - "©°": 18095, - "ĠFish": 18096, - "........": 18097, - "Ġvessel": 18098, - "Ġtrabajo": 18099, - "ãģµ": 18100, - "ĠOcean": 18101, - "å§IJ": 18102, - "yg": 18103, - "Ġtowns": 18104, - "del": 18105, - "Ġterrifying": 18106, - "ĠçalÄ±ÅŁ": 18107, - "Ġsino": 18108, - "Ġeats": 18109, - "Ġgez": 18110, - "Ġgeme": 18111, - "ĠìĻĦ": 18112, - "Ġcompart": 18113, - "Ġimplementing": 18114, - "ĠPotter": 18115, - "ĠGermans": 18116, - "ĠgÅĤ": 18117, - "Ġtennis": 18118, - "Ġcarpet": 18119, - "auer": 18120, - "ĠSaudi": 18121, - "yeong": 18122, - 
"Ġcurry": 18123, - "ĠForest": 18124, - "Ñĭл": 18125, - "Ġfifteen": 18126, - "Ġbolts": 18127, - "Ġ{\\": 18128, - "¬´": 18129, - "Ġsettlement": 18130, - "Ġlange": 18131, - "Ġbam": 18132, - "Get": 18133, - "íķĻ": 18134, - "Ġswap": 18135, - "ĠKhan": 18136, - "Ġcommence": 18137, - "Ġquarantine": 18138, - "Ġscored": 18139, - "çĸ": 18140, - "Ġ1950": 18141, - "Ġthicker": 18142, - "Ġsûr": 18143, - "åı£": 18144, - "ĠLarry": 18145, - "Ġallez": 18146, - "ìĭľëĬĶ": 18147, - "Ġgü": 18148, - "Ġspectacular": 18149, - "//": 18150, - "both": 18151, - "Ġstats": 18152, - "妳": 18153, - "ĠNancy": 18154, - "Ġbunu": 18155, - "Ġcrust": 18156, - "Ġactivated": 18157, - "Ġê·¸ëŀ": 18158, - "outhe": 18159, - "Ġports": 18160, - "Ġneural": 18161, - "Ġjaw": 18162, - "Ġobservations": 18163, - "Ġvoit": 18164, - "aban": 18165, - "ải": 18166, - "¦¬ë¥¼": 18167, - "omes": 18168, - "à¯ĭ": 18169, - "qui": 18170, - "Ġkindness": 18171, - "Ðij": 18172, - "Ġ41": 18173, - "Ġmoderate": 18174, - "Ġangels": 18175, - "ĠTamb": 18176, - "èt": 18177, - "Ġchlor": 18178, - "ĠBilly": 18179, - "ì²ĺë": 18180, - "acon": 18181, - "Ġselecting": 18182, - "ĠDelta": 18183, - "Ġnull": 18184, - "denly": 18185, - "Ġciud": 18186, - "Ġtendency": 18187, - "Ġbreakdown": 18188, - "Ġmint": 18189, - "ÑĦоÑĢм": 18190, - "orph": 18191, - "Ġdawn": 18192, - "spr": 18193, - "ĠWILL": 18194, - "ächlich": 18195, - "Ġpuppy": 18196, - "700": 18197, - "Ġத": 18198, - "Ġfails": 18199, - "ĠConc": 18200, - "Ġrelatives": 18201, - "Ġinviting": 18202, - "Ġautonom": 18203, - "Ġcomposed": 18204, - "Ġunity": 18205, - "Ġdecis": 18206, - "Ġaccessories": 18207, - "ĠCass": 18208, - "Ġbist": 18209, - "ĠTip": 18210, - "째": 18211, - "Ġpunt": 18212, - "Ġráp": 18213, - "éĢ²": 18214, - "ANK": 18215, - "ãģļ": 18216, - "exist": 18217, - "Ġcompatible": 18218, - "Ġner": 18219, - "ĠемÑĥ": 18220, - "Ġaplic": 18221, - "Ġbapt": 18222, - "Ġfailing": 18223, - "ĠTamam": 18224, - "Ġoscill": 18225, - "Ġletzten": 18226, - "Ġrepeatedly": 18227, - "Ġjungle": 18228, - "ĠPush": 18229, - "hai": 18230, - "Ġη": 18231, - "Ġdeadly": 18232, - "Ñıж": 18233, - "wiÄħ": 18234, - "ĠCommon": 18235, - "ĠÎķ": 18236, - "Ġskate": 18237, - "TC": 18238, - "ĠMini": 18239, - "Ġhobby": 18240, - "ần": 18241, - "Ġroutes": 18242, - "Ġamigos": 18243, - "Ġconjun": 18244, - "Ġpartnerships": 18245, - "Ġnovo": 18246, - "Ġaver": 18247, - "Ġpouvez": 18248, - "bridge": 18249, - "Ġpreoc": 18250, - "him": 18251, - "Ġturb": 18252, - "Ġsob": 18253, - "ĠSnap": 18254, - "Ġì°¸": 18255, - "minute": 18256, - "Ġtraject": 18257, - "ujÄĻ": 18258, - "Ġeager": 18259, - "Ġregulatory": 18260, - "Ġbanking": 18261, - "bling": 18262, - "ÑĪÑĮ": 18263, - "aż": 18264, - "Ġbizarre": 18265, - "itated": 18266, - "dire": 18267, - "Ġthreatened": 18268, - "Ġshining": 18269, - "Ġnesse": 18270, - "Ġcorps": 18271, - "ĠÑģÑĥ": 18272, - "Ġteles": 18273, - "Ġtemp": 18274, - "tem": 18275, - "Ġкан": 18276, - "Ġfever": 18277, - "New": 18278, - "Ġheavier": 18279, - "ĠSah": 18280, - "bud": 18281, - "Ġoutros": 18282, - "Ġì°¾": 18283, - "Ġëªħ": 18284, - "arring": 18285, - "Ġê´ľì°®": 18286, - "ĠNap": 18287, - "Ġsemin": 18288, - "ĠThan": 18289, - "ifs": 18290, - "Ġdesen": 18291, - "ĠÑĤакое": 18292, - "Ġloses": 18293, - "ĠBalt": 18294, - "kon": 18295, - "ĠнапÑĢ": 18296, - "Ġvois": 18297, - "ĠMoscow": 18298, - "Ġchairs": 18299, - "his": 18300, - "Ġrefugees": 18301, - "kg": 18302, - "Ġkole": 18303, - "į¨": 18304, - "аÑģибо": 18305, - "¦½": 18306, - "ĠUniverse": 18307, - "ĠDirect": 18308, - "Ġcheating": 18309, - "ĠCin": 18310, - "Ġpatri": 18311, - "Ġadvise": 18312, - "ĠNether": 18313, - 
"Ġprimeiro": 18314, - "Ġmentioning": 18315, - "nut": 18316, - "56": 18317, - "arı": 18318, - "Ġpetite": 18319, - "bled": 18320, - "Ġpensar": 18321, - "icio": 18322, - "IND": 18323, - "Ġveteran": 18324, - "Ġladder": 18325, - "Ġconsequence": 18326, - "ожал": 18327, - "ĠBurn": 18328, - "Ġrug": 18329, - "ĠMade": 18330, - "Ġgit": 18331, - "\"...": 18332, - "Ġcompetitors": 18333, - "Ġprzed": 18334, - "Ġapparent": 18335, - "ĠArgentina": 18336, - "ĠWorking": 18337, - "Ġcollaborate": 18338, - "woman": 18339, - "Ġretain": 18340, - "Ġleurs": 18341, - "Ġdashboard": 18342, - "×Ļ×ĵ": 18343, - "ĠEarly": 18344, - "BM": 18345, - "ĠеÑij": 18346, - "олог": 18347, - "Ġsatisfying": 18348, - "Ġoftentimes": 18349, - "Ġmapping": 18350, - "ünkü": 18351, - "arth": 18352, - "fold": 18353, - "Ġlaunching": 18354, - "Ġaura": 18355, - "Ġprecision": 18356, - "works": 18357, - "God": 18358, - "Ġstrap": 18359, - "ĠImper": 18360, - "Ġrivers": 18361, - "Ġ|": 18362, - "Ġcuer": 18363, - "regon": 18364, - "Ġarrival": 18365, - "каÑħ": 18366, - "ĠMiami": 18367, - "анÑĭ": 18368, - "Ġsurvivors": 18369, - "ĠSenior": 18370, - "David": 18371, - "Ġestado": 18372, - "Ġsectors": 18373, - "Ġpopping": 18374, - "Ġchim": 18375, - "ayı": 18376, - "Ġkunnen": 18377, - "Ġgallery": 18378, - "Ġsunlight": 18379, - "esehen": 18380, - "Ġyelling": 18381, - "ĠMein": 18382, - "ĠPhoenix": 18383, - "Ġmano": 18384, - "Ġhistoria": 18385, - "Ġoccurring": 18386, - "欸": 18387, - "ì¸": 18388, - "ади": 18389, - "å¾ħ": 18390, - "Ġinstitutional": 18391, - "ĠTut": 18392, - "ç²": 18393, - "Ġslaves": 18394, - "ãģ©ãģĨ": 18395, - "Ġforgiveness": 18396, - "Ġtwin": 18397, - "ĠHyun": 18398, - "нÑĮ": 18399, - "ĠKomm": 18400, - "andra": 18401, - "shot": 18402, - "ssä": 18403, - "ĠÑĨе": 18404, - "atta": 18405, - "Ġexpense": 18406, - "ĠGPU": 18407, - "ĠPast": 18408, - "ribly": 18409, - "ĠëŃIJìķ¼": 18410, - "Ġгода": 18411, - "Ġrespir": 18412, - "æĿ±": 18413, - "ĠQueens": 18414, - "hops": 18415, - "Ġsérie": 18416, - "Ġpref": 18417, - "Ġcomed": 18418, - "Ġplut": 18419, - "ĠOverall": 18420, - "ĠãģĿ": 18421, - "Ġcush": 18422, - "Ġringing": 18423, - "Ġincorrect": 18424, - "ĠÑģÑĤÑĢ": 18425, - "Ġgeometry": 18426, - "Ġadvertis": 18427, - "ĠШ": 18428, - "Ġreviewed": 18429, - "ãģĤãģĤ": 18430, - "Ġdozens": 18431, - "Ġdetermination": 18432, - "ĠPhill": 18433, - "Ġcontributed": 18434, - "ĠCit": 18435, - "Ġpassengers": 18436, - "Ġcôté": 18437, - "Ġrever": 18438, - "Ġtechnological": 18439, - "Ġallen": 18440, - "Ġraining": 18441, - "avi": 18442, - "Ġsalty": 18443, - "Ġtyping": 18444, - "ĠÑĤе": 18445, - "Ġtilt": 18446, - "Ġì¹ĺ": 18447, - "ĠоÑĢ": 18448, - "ĠпÑĢÑıм": 18449, - "Ġrou": 18450, - "Ġarena": 18451, - "arat": 18452, - "åĪ«": 18453, - "HHHH": 18454, - "Ġmanufacturers": 18455, - "ĠEdward": 18456, - "Ġtuck": 18457, - "Ġblows": 18458, - "ingo": 18459, - "ĠMarc": 18460, - "ìķĦìĦľ": 18461, - "Mich": 18462, - "ĠClean": 18463, - "è´": 18464, - "esto": 18465, - "ĠPack": 18466, - "Ġshaft": 18467, - "BRUNO": 18468, - "Ġaven": 18469, - "uur": 18470, - "ÑģколÑĮко": 18471, - "ê´Ģ": 18472, - "Ġautomated": 18473, - "Ġventure": 18474, - "Ġsurveillance": 18475, - "ĠGrow": 18476, - "ĠEmer": 18477, - "ĠдоÑĢ": 18478, - "Ġinvestor": 18479, - "ĠYok": 18480, - "Ġlatter": 18481, - "ĠNI": 18482, - "Ġfunctioning": 18483, - "ĠHamilton": 18484, - "Ġ51": 18485, - "Ġmurdered": 18486, - "Ġanchor": 18487, - "Ġcuc": 18488, - "ĠSCP": 18489, - "ĠMadam": 18490, - "Ġconstraints": 18491, - "Ġbarn": 18492, - "anken": 18493, - "Ġë§İìĿĢ": 18494, - "ĠMotor": 18495, - "ĠDoing": 18496, - "Ġamen": 18497, - "etts": 18498, - 
"Ġinstructor": 18499, - "egt": 18500, - "ako": 18501, - "Ġposture": 18502, - "ivia": 18503, - "ĠPolish": 18504, - "Ġдва": 18505, - "Ġcolorful": 18506, - "Ġelbow": 18507, - "Ġparle": 18508, - "Ġpasser": 18509, - "Ġcondem": 18510, - "ortal": 18511, - "Ġfertil": 18512, - "اد": 18513, - "ĠColomb": 18514, - "Ġalignment": 18515, - "Ġastronaut": 18516, - "ĠMut": 18517, - "Ġsalmon": 18518, - "Ġstructured": 18519, - "ŀר": 18520, - "Ġclicks": 18521, - "Ġmiej": 18522, - "æĶ¿": 18523, - "ãģĦãĤĦ": 18524, - "ĠRound": 18525, - "Ġrainbow": 18526, - "ĠVA": 18527, - "ãģĶãģĸ": 18528, - "ì§Ī": 18529, - "otz": 18530, - ",": 21732, - "Ġchords": 21733, - "ĠSanders": 21734, - "Ġë¶Ħë": 21735, - "Ben": 21736, - "Ġdarüber": 21737, - "ilians": 21738, - "Ġordering": 21739, - "ĠManh": 21740, - "Ġkilogram": 21741, - "ĠkarÅŁ": 21742, - "Ġgrasp": 21743, - "Ġghosts": 21744, - "alen": 21745, - "ĠJedi": 21746, - "Ġбли": 21747, - "Ġdownloaded": 21748, - "Ġconducting": 21749, - "ĠHak": 21750, - "Ġresearcher": 21751, - "ilan": 21752, - "good": 21753, - "ĠHannah": 21754, - "ĠdÃ¼ÅŁÃ¼n": 21755, - "ĠMessiah": 21756, - "uity": 21757, - "iona": 21758, - "Ġprobable": 21759, - "ĠYE": 21760, - "Ġindependently": 21761, - "Ġbuffer": 21762, - "burn": 21763, - "ourd": 21764, - "ĠMcK": 21765, - "Ġlingu": 21766, - "ujemy": 21767, - "еÑĢÑĤ": 21768, - "Ġintuitive": 21769, - "Ġcracks": 21770, - "appropri": 21771, - "nty": 21772, - "Ġgeen": 21773, - "Ġlend": 21774, - "Ġcertification": 21775, - "IDS": 21776, - "unter": 21777, - "pees": 21778, - "Ġtrump": 21779, - "Ġbankrupt": 21780, - "Ġfeas": 21781, - "èĹ": 21782, - "Ġduż": 21783, - "æ¸ħ": 21784, - "Ġviruses": 21785, - "Ġ58": 21786, - "god": 21787, - "Ġжел": 21788, - "Ġstalk": 21789, - "Ind": 21790, - "achi": 21791, - "ĠCF": 21792, - "ĠCond": 21793, - "Ġsanct": 21794, - "Ġconten": 21795, - "Ġfreed": 21796, - "ĠRT": 21797, - "Ġmentors": 21798, - "족": 21799, - "Ġportable": 21800, - "ĠPaulo": 21801, - "rane": 21802, - "HAHA": 21803, - "ĠSection": 21804, - "çĨ": 21805, - "hyun": 21806, - "ĠÎŃÏĩ": 21807, - "ĠPub": 21808, - "ĠIndepend": 21809, - "Ġcompounds": 21810, - "ĠÑģÑĭ": 21811, - "Ġmessaging": 21812, - "Ġdedication": 21813, - "Ġnoticing": 21814, - "Ġdevoted": 21815, - "ÑİÑĤÑģÑı": 21816, - "Ġsnakes": 21817, - "Ġbattlefield": 21818, - "pers": 21819, - "Ġdela": 21820, - "92": 21821, - "Ġhai": 21822, - "illä": 21823, - "érer": 21824, - "every": 21825, - "Ġresponsive": 21826, - "×Ļ×ķ": 21827, - "opf": 21828, - "éī": 21829, - "Ĭ¸": 21830, - "Because": 21831, - "Ġtourism": 21832, - "Ġê·¸ê²Į": 21833, - "×ķצ": 21834, - "Ġcans": 21835, - "stüt": 21836, - "Ġdonne": 21837, - "ĠDios": 21838, - "ĠUber": 21839, - "actory": 21840, - "Ġoriented": 21841, - "ĠHerm": 21842, - "Ġpatron": 21843, - "urf": 21844, - "bei": 21845, - "Ġprograma": 21846, - "ĠOhh": 21847, - "gener": 21848, - "Ġfist": 21849, - "ĠWendy": 21850, - "Ġanda": 21851, - "Ġguessed": 21852, - "Ġfreak": 21853, - "ä¸Ńåľĭ": 21854, - "ĠKings": 21855, - "chool": 21856, - "Ġoffline": 21857, - "ĠIndiana": 21858, - "ĠAlliance": 21859, - "Ġ53": 21860, - "Ġparticul": 21861, - "ĠFocus": 21862, - "Ġinhabit": 21863, - "Ġê°ĻìĿĢëį°": 21864, - "ĠMcG": 21865, - "owski": 21866, - "ĠìĿ´ê±´": 21867, - "ĠpaÅĦst": 21868, - "они": 21869, - "itta": 21870, - "Ġconfirmation": 21871, - "ĠBrooklyn": 21872, - "Ġnoodle": 21873, - "fund": 21874, - "itud": 21875, - "Ġgrandparents": 21876, - "Ġbarbecue": 21877, - "ειÏĤ": 21878, - "Ġá": 21879, - "Ġballot": 21880, - "ĠVeter": 21881, - "Ġpipes": 21882, - "igious": 21883, - "ĠGraph": 21884, - "ested": 21885, - "Ġë¸Įë": 21886, - "ĠKE": 
21887, - "ãģ¡ãĤĩãģ£ãģ¨": 21888, - "Ġeins": 21889, - "Ġhatred": 21890, - "ãģijãģ©": 21891, - "Ġdang": 21892, - "eeee": 21893, - "Ġarchae": 21894, - "ĠJesse": 21895, - "Ġdetected": 21896, - "Ġseni": 21897, - "burgh": 21898, - "Ġdisplacement": 21899, - "Ġdop": 21900, - "Ġconditioning": 21901, - "ĠнеÑģколÑĮко": 21902, - "Ġdisturbing": 21903, - "PH": 21904, - "Ġthinner": 21905, - "Ġwounded": 21906, - "ĠCuando": 21907, - "Ġcushion": 21908, - "Ġwhites": 21909, - "Ġpreferences": 21910, - "Ġì¤Ģë¹Ħ": 21911, - "Ġkaż": 21912, - "ĠGate": 21913, - "ĠPath": 21914, - "dles": 21915, - "à¸Ħร": 21916, - "imore": 21917, - "Ġë³´ìŬ": 21918, - "Ġdisciplines": 21919, - "á»ı": 21920, - "Ġmesma": 21921, - "ĠìĥĪë": 21922, - "Ġìĭ¬": 21923, - "Ġging": 21924, - "Ġumbrella": 21925, - "IGHT": 21926, - "Ġpension": 21927, - "Ġcombining": 21928, - "SS": 21929, - "Ġrectangle": 21930, - "á»ĩt": 21931, - "Ġproxim": 21932, - "ĠCow": 21933, - "¸Į": 21934, - "Ġintentional": 21935, - "æķĻ": 21936, - "Ġdecid": 21937, - "ĠÑģкаж": 21938, - "ĠUma": 21939, - "iasm": 21940, - "buz": 21941, - "Ġdebris": 21942, - "Ġcass": 21943, - "ĠProp": 21944, - "iska": 21945, - "ëł¥": 21946, - "esterol": 21947, - "ussian": 21948, - "ìĿ´ëŀij": 21949, - "Ġunlimited": 21950, - "Ġadmire": 21951, - "Ġtightly": 21952, - "Ġgenome": 21953, - "ĠJunior": 21954, - "venir": 21955, - "gus": 21956, - "ĠcÄĥ": 21957, - "ĠVlad": 21958, - "ĠíĤ": 21959, - "Ġrelativ": 21960, - "inci": 21961, - "Ġaunque": 21962, - "ĠBoys": 21963, - "ÑĨион": 21964, - "ĠSwiss": 21965, - "Ġphysicians": 21966, - "Ġíıī": 21967, - "ĠPET": 21968, - "Ġwounds": 21969, - "about": 21970, - "Ãłi": 21971, - "onz": 21972, - "urities": 21973, - "ĠÑĥвид": 21974, - "å·¦": 21975, - "Ġmentality": 21976, - "Ġvariance": 21977, - "Ġsegunda": 21978, - "Ġvolcano": 21979, - "alie": 21980, - "à¥ĩ": 21981, - "Ġtiles": 21982, - "ĠTerry": 21983, - "ĠاÙĦÙĦÙĩ": 21984, - "Ġcanon": 21985, - "Ġscattered": 21986, - "pton": 21987, - "Ġdefinitions": 21988, - "Ġalgebra": 21989, - "oten": 21990, - "ablo": 21991, - "ijuana": 21992, - "Ġwrapping": 21993, - "Ġsesame": 21994, - "ĠнаÑĩина": 21995, - "ĠAlf": 21996, - "ĠÐłÐ¾ÑģÑģ": 21997, - "orno": 21998, - "Ġankle": 21999, - "Ġspecialty": 22000, - "Ġattempting": 22001, - "iliation": 22002, - "Ġ1920": 22003, - "Ġphenomena": 22004, - "ĠProduct": 22005, - "ĠBuck": 22006, - "ĠAww": 22007, - "seen": 22008, - "Ġvoid": 22009, - "ĠFranklin": 22010, - "Ġadvocacy": 22011, - "ĠSep": 22012, - "Ġcoolest": 22013, - "ĠÑģÑĢазÑĥ": 22014, - "ĠQuand": 22015, - "Ġ900": 22016, - "ĠTrad": 22017, - "dies": 22018, - "Ġhash": 22019, - "æĪijå°±": 22020, - "ä¹Łæĺ¯": 22021, - "Ġpots": 22022, - "Ġsadly": 22023, - "Ġviable": 22024, - "ĠTiger": 22025, - "ĠONE": 22026, - "Ġneurons": 22027, - "owanie": 22028, - "ÄĹ": 22029, - "ĠShar": 22030, - "ĠLandes": 22031, - "Ġconferences": 22032, - "該": 22033, - "Ġcredential": 22034, - "Ġlime": 22035, - "inee": 22036, - "xit": 22037, - "pay": 22038, - "Ġincons": 22039, - "Ġ>>:": 22040, - "èªį": 22041, - "Ġíŀĺë": 22042, - "Ġlesser": 22043, - "Ġspill": 22044, - "Ġpremise": 22045, - "Ġ365": 22046, - "ĠHost": 22047, - "Ġtomar": 22048, - "×IJ׾": 22049, - "ë²Ī": 22050, - "ĠWhats": 22051, - "Ġlightweight": 22052, - "ĠMap": 22053, - "fia": 22054, - "ellschaft": 22055, - "Ġvendors": 22056, - "uesto": 22057, - "ĠMister": 22058, - "ĠÐŁÑĢи": 22059, - "åı³": 22060, - "hma": 22061, - "Ġintentionally": 22062, - "ĠTang": 22063, - "éĹ®": 22064, - "Ġidentification": 22065, - "Ġetcetera": 22066, - "ĠNee": 22067, - "ĠÑĤÑĢи": 22068, - "ê·¸": 22069, - "Ġcryptocur": 22070, - "Ġinhale": 22071, - 
"Ġaddict": 22072, - "åIJĦä½į": 22073, - "Ġmau": 22074, - "ĠÑĤакаÑı": 22075, - "Ġë²Ħ": 22076, - "Ġcomprar": 22077, - "iedzieÄĩ": 22078, - "ĠоÑĤно": 22079, - "Ġbeginner": 22080, - "ĠмÑĥж": 22081, - "Ġobsc": 22082, - "Ġlimiting": 22083, - "ascular": 22084, - "Ġinspection": 22085, - "aci": 22086, - "Ġrejo": 22087, - "Mus": 22088, - "Ġzaten": 22089, - "Ġszcz": 22090, - "ĠMadrid": 22091, - "Ġvarieties": 22092, - "ĠestÃł": 22093, - "ĠShakes": 22094, - "Ġkits": 22095, - "Ġadminister": 22096, - "Ġlava": 22097, - "ĠgÃ¥": 22098, - "試": 22099, - "ת×Ļ": 22100, - "ĠWayne": 22101, - "Ġinstagram": 22102, - "Ġrated": 22103, - "paper": 22104, - "Ġbild": 22105, - "Ġpretending": 22106, - "Ġobserving": 22107, - "ĠÑģамом": 22108, - "Ġtror": 22109, - "Ġorganisms": 22110, - "Ġfalta": 22111, - "Ġhometown": 22112, - "ç±": 22113, - "Ġíĭ": 22114, - "Ġcheg": 22115, - "Ġì¡": 22116, - "Ġcomma": 22117, - "isé": 22118, - "Ġlikelihood": 22119, - "avored": 22120, - "Ġgeldi": 22121, - "ников": 22122, - "Ġmedio": 22123, - "Ġjakie": 22124, - "ĠJup": 22125, - "Ġgreenhouse": 22126, - "Ġspit": 22127, - "кое": 22128, - "Ġкаж": 22129, - "ĠGram": 22130, - "ĠConference": 22131, - "Ġdeficit": 22132, - "sın": 22133, - "inse": 22134, - "uÄŁ": 22135, - "Ġricht": 22136, - "Ġcoincidence": 22137, - "åıį": 22138, - "Ġeurop": 22139, - "Ġbutterfly": 22140, - "pread": 22141, - "Ġìĸ¼": 22142, - "èĢ¶": 22143, - "Ġwavel": 22144, - "ĠInfin": 22145, - "ĠPlanet": 22146, - "Ġselfie": 22147, - "ientras": 22148, - "Ġarrog": 22149, - "oser": 22150, - "idal": 22151, - "ł×Ĺ׳×ķ": 22152, - "ütün": 22153, - "Ġfreshman": 22154, - "ĠMachine": 22155, - "ÏĥÏĦ": 22156, - "ĠDia": 22157, - "ìĿ´ëĭ¤": 22158, - "ãģĵãģĨ": 22159, - "nea": 22160, - "Ġlisting": 22161, - "Ġconfigure": 22162, - "utor": 22163, - "Up": 22164, - "tschaft": 22165, - "rière": 22166, - "Ġupwards": 22167, - "ĠÑħоÑĩÑĥ": 22168, - "Ġsweep": 22169, - "Br": 22170, - "Ġexpressing": 22171, - "Ġunhappy": 22172, - "Ġmandatory": 22173, - "gender": 22174, - "ĠAÃŃ": 22175, - "Ġindicators": 22176, - "Ġoils": 22177, - "note": 22178, - "Ġsegur": 22179, - "ожеÑĤ": 22180, - "ynasty": 22181, - "Ġdistances": 22182, - "Ġmerge": 22183, - "BERT": 22184, - "Ġsurrender": 22185, - "Ġbuat": 22186, - "ĠAwards": 22187, - "Ġseñor": 22188, - "odox": 22189, - "Ġflavour": 22190, - "Ġabdom": 22191, - "Ġconfigur": 22192, - "86": 22193, - "ĠDIY": 22194, - "Ġrigid": 22195, - "°ĺ": 22196, - "Ġcorporation": 22197, - "Ġgroom": 22198, - "jaw": 22199, - "ĠNear": 22200, - "ило": 22201, - "Ġopera": 22202, - "ĠInnov": 22203, - "иÑĢа": 22204, - "ĵ±": 22205, - "Ġspecified": 22206, - "Ġcosm": 22207, - "ĠFreedom": 22208, - "Ġclown": 22209, - "ĠNem": 22210, - "Ġвол": 22211, - "Ñijн": 22212, - "Ġcharger": 22213, - "à¹ģล": 22214, - "Ġinfluential": 22215, - "äsident": 22216, - "é¤": 22217, - "ĠìĦłë": 22218, - "Ġvolumes": 22219, - "æIJ": 22220, - "Ġoutras": 22221, - "ĠTwitch": 22222, - "Ġfounding": 22223, - "Ġawhile": 22224, - "Ġcoil": 22225, - "ê°Ļ": 22226, - "Ġcả": 22227, - "ĠThrow": 22228, - "ĠHence": 22229, - "ommt": 22230, - "ĠBenjamin": 22231, - "глÑıд": 22232, - "Time": 22233, - "obic": 22234, - "Ġmour": 22235, - "Ġdread": 22236, - "ĠLÃł": 22237, - "ĠChile": 22238, - "Ġpreval": 22239, - "Ġvain": 22240, - "Ġartık": 22241, - "Ġpreserved": 22242, - "ĠоÑĤд": 22243, - "Ġwarehouse": 22244, - "Ġbeste": 22245, - "ĠSeveral": 22246, - "ĠSituation": 22247, - "Ġcardboard": 22248, - "Tod": 22249, - "erna": 22250, - "Ġgarant": 22251, - "Ġgesture": 22252, - "Ġhen": 22253, - "Ġspelling": 22254, - "osexual": 22255, - "Ġanne": 22256, - "Ġmice": 22257, - 
"ĠMeine": 22258, - "card": 22259, - "Ġrebell": 22260, - "Ġcerto": 22261, - "Ġìľłë": 22262, - "Ġverschied": 22263, - "ĠBos": 22264, - "Ġinvention": 22265, - "Ġtrze": 22266, - "Ġmanière": 22267, - "ĠChad": 22268, - "Ġspre": 22269, - "Ġorganisations": 22270, - "Ġpoorly": 22271, - "Ġanterior": 22272, - "Ġstair": 22273, - "кÑĢ": 22274, - "Ġatomic": 22275, - "Ġsympath": 22276, - "Ġcontinually": 22277, - "Ġkleine": 22278, - "ète": 22279, - "иÑī": 22280, - "οÏĤ": 22281, - "peut": 22282, - "Ġreposit": 22283, - "Ġentra": 22284, - "Em": 22285, - "Ġfinancing": 22286, - "Ġмног": 22287, - "Ġthesis": 22288, - "ĠComputer": 22289, - "eau": 22290, - "ĠTree": 22291, - "Ġbride": 22292, - "onsieur": 22293, - "shire": 22294, - "wic": 22295, - "DE": 22296, - "ĠìĪĺë": 22297, - "Ġacom": 22298, - "ĠPO": 22299, - "ersch": 22300, - "ĠпомоÑī": 22301, - "ĠArmen": 22302, - "Ġ죽": 22303, - "Ġzor": 22304, - "Ġprints": 22305, - "ĠDass": 22306, - "港": 22307, - "Ġdurable": 22308, - "ĠTransport": 22309, - "ìŀIJê°Ģ": 22310, - "Ġлег": 22311, - "Ġdét": 22312, - "ôle": 22313, - "amous": 22314, - "YN": 22315, - "Ġcliff": 22316, - "Ġgrammar": 22317, - "ĠÐŁÐ¾ÑįÑĤомÑĥ": 22318, - "ĠlÃłm": 22319, - "esch": 22320, - "Ġmiserable": 22321, - "Ġvolts": 22322, - "ĠCad": 22323, - "ukan": 22324, - "ÑĤив": 22325, - "rust": 22326, - "Ġìĺ¬ëĿ¼": 22327, - "Ġverk": 22328, - "Ġchickens": 22329, - "ĠYoo": 22330, - "Ġoutfits": 22331, - "code": 22332, - "Ġhierarchy": 22333, - "netes": 22334, - "Ġcounterpart": 22335, - "Ġtôi": 22336, - "Ġted": 22337, - "ĠBart": 22338, - "ĠëĿ¼": 22339, - "ĠGenau": 22340, - "Ġincoming": 22341, - "ĠABC": 22342, - "rique": 22343, - "ĠоÑĤп": 22344, - "qual": 22345, - "Ġincentive": 22346, - "Ġihren": 22347, - "׳×Ļ": 22348, - "loe": 22349, - "Ġ1930": 22350, - "Ġbarg": 22351, - "Ġdiction": 22352, - "Ġönce": 22353, - "INS": 22354, - "Ġreh": 22355, - "isiaj": 22356, - "mouth": 22357, - "Ġscoring": 22358, - "lık": 22359, - "ĠìķĦ주": 22360, - "ORIA": 22361, - "ĠEstados": 22362, - "Ġcompanion": 22363, - "Ġassemble": 22364, - "Ġpunished": 22365, - "Ġital": 22366, - "Ġprevents": 22367, - "istes": 22368, - "ĠKentucky": 22369, - "Ġlocate": 22370, - "Ġfasting": 22371, - "ãģ¨æĢĿ": 22372, - "ĥĢ": 22373, - "ĠSeb": 22374, - "ĠCrown": 22375, - "opia": 22376, - "Ġwhip": 22377, - "usz": 22378, - "ками": 22379, - "Ġdatabases": 22380, - "åŃĹ": 22381, - "Ġprosec": 22382, - "Ġ1997": 22383, - "ĠìĤ´ì§Ŀ": 22384, - "ĠSolar": 22385, - "ĠPues": 22386, - "ĠZen": 22387, - "ollo": 22388, - "ĠGuru": 22389, - "Ġsqueez": 22390, - "ĠÐĹа": 22391, - "ĠÄį": 22392, - "ceptions": 22393, - "cca": 22394, - "izable": 22395, - "mand": 22396, - "Ġbreakthrough": 22397, - "Ġtablespoon": 22398, - "ĠSEC": 22399, - "ikh": 22400, - "ĠSão": 22401, - "Ġпло": 22402, - "amen": 22403, - "Ġprac": 22404, - "Ġdarling": 22405, - "Ġtaller": 22406, - "Ġrendering": 22407, - "Ġìļ°ë¦¬ê°Ģ": 22408, - "ĠÏĦηÏĤ": 22409, - "Ġmã": 22410, - "Ġesos": 22411, - "uerdo": 22412, - "ĠÑģÑĩиÑĤ": 22413, - "aller": 22414, - "ìĹĪìĸ´ìļĶ": 22415, - "Ġmillones": 22416, - "lerin": 22417, - "Ġpegar": 22418, - "onne": 22419, - "Ġenrollment": 22420, - "Ġliegt": 22421, - "Ġboa": 22422, - "wiÄĻ": 22423, - "bsp": 22424, - "Ġcycling": 22425, - "ĠBernie": 22426, - "Ġ1989": 22427, - "ĠдалÑĮ": 22428, - "ĠDakota": 22429, - "ĠÑģвÑıз": 22430, - "ĠCP": 22431, - "Ġstare": 22432, - "íĤ¤": 22433, - "Ġprosperity": 22434, - "Ġarrangements": 22435, - "Ġarriving": 22436, - "mä": 22437, - "Ġkayak": 22438, - "ipt": 22439, - "Ġpardon": 22440, - "Ġrelat": 22441, - "Ġverste": 22442, - "ĠFig": 22443, - "Ġfoil": 22444, - "ĠTalking": 22445, - 
"peare": 22446, - "Ġnoi": 22447, - "ĠпÑĢиÑĪ": 22448, - "Ġhockey": 22449, - "Ġado": 22450, - "ĠOUT": 22451, - "67": 22452, - "Ġhormones": 22453, - "ĠAvenue": 22454, - "ĠSuperman": 22455, - "Ġprescription": 22456, - "ubernetes": 22457, - "CL": 22458, - "otive": 22459, - "NIS": 22460, - "ienen": 22461, - "Ġsadness": 22462, - "ĠVit": 22463, - "Ty": 22464, - "Ġstarter": 22465, - "Ġbede": 22466, - "Ġfoundations": 22467, - "Ġsore": 22468, - "åºĹ": 22469, - "ÑīеÑģÑĤв": 22470, - "ìļ°ë": 22471, - "ĠÑĩÑĥв": 22472, - "link": 22473, - "Ġmaneu": 22474, - "working": 22475, - "Ãłn": 22476, - "ĠAttack": 22477, - "ĠCart": 22478, - "veis": 22479, - "ĠResp": 22480, - "ensing": 22481, - "Ġì¢ĭìķĦìļĶ": 22482, - "Ġescuch": 22483, - "ĠRNA": 22484, - "Ĥ´": 22485, - "Ġadop": 22486, - "Ġbending": 22487, - "عد": 22488, - "Ġmanages": 22489, - "usp": 22490, - "Ġtart": 22491, - "Ġrouter": 22492, - "Bo": 22493, - "Ġestablishing": 22494, - "Ġbalancing": 22495, - "Ġathletic": 22496, - "ĠSlo": 22497, - "Ġfills": 22498, - "Ġнаб": 22499, - "Ġдал": 22500, - "Ġposso": 22501, - "ĠVielen": 22502, - "Ġcritics": 22503, - "Ġlawsuit": 22504, - "ĠIsaac": 22505, - "ĠÑĦилÑĮм": 22506, - "Ġtras": 22507, - "Ġpraw": 22508, - "ĠCrazy": 22509, - "Ġneu": 22510, - "Ġkull": 22511, - "Ġtumor": 22512, - "ĠAPP": 22513, - "gate": 22514, - "ĠARE": 22515, - "98": 22516, - "ĠSteam": 22517, - "Ġfucked": 22518, - "lage": 22519, - "ĠâĻ¬": 22520, - "ĠMD": 22521, - "fy": 22522, - "Ġshells": 22523, - "ĠSeems": 22524, - "izers": 22525, - "Ġranges": 22526, - "ĠAntonio": 22527, - "ATION": 22528, - "ĠBaba": 22529, - "Ġìĥī": 22530, - "kun": 22531, - "Ġprayed": 22532, - "ÑĢÑı": 22533, - "ĠпÑĢоÑĤив": 22534, - "Ġseas": 22535, - "bury": 22536, - "Ġ×Ķש": 22537, - "Ġtrait": 22538, - "ĠDepending": 22539, - "Ġdre": 22540, - "Ġkönnt": 22541, - "ÑĨÑĥ": 22542, - "Ġlipstick": 22543, - "eez": 22544, - "ĠпÑĢимеÑĢ": 22545, - "Ġassignments": 22546, - "Bob": 22547, - "Ġmetals": 22548, - "Ġspecially": 22549, - "å°įä¸įå°į": 22550, - "ĠìĺĪë": 22551, - "ĠÅ¡": 22552, - "Ġvista": 22553, - "Ġά": 22554, - "Ġtwins": 22555, - "Ġnotable": 22556, - "ĠSau": 22557, - "Ġdévelop": 22558, - "Ġçek": 22559, - "Ġpolynom": 22560, - "avam": 22561, - "Ġtambé": 22562, - "оном": 22563, - "Ġplasma": 22564, - "Ġefect": 22565, - "Ġläng": 22566, - "Ġcasi": 22567, - "Ñģа": 22568, - "ımı": 22569, - "ãģĻãĤĭ": 22570, - "ĵ¤ìĿĢ": 22571, - "Ġlabour": 22572, - "ossen": 22573, - "ĠPun": 22574, - "rif": 22575, - "Ġdoses": 22576, - "Ġoperates": 22577, - "илли": 22578, - "Ġjaar": 22579, - "staw": 22580, - "ĠìĤ¬ëŀij": 22581, - "Ġatm": 22582, - "Ġprotects": 22583, - "Ġimped": 22584, - "HO": 22585, - "Ġcima": 22586, - "Ġtoch": 22587, - "abis": 22588, - "Ġsendo": 22589, - "laus": 22590, - "Ġcurl": 22591, - "ĠNum": 22592, - "Ġsponsors": 22593, - "Ġdébut": 22594, - "ĠAlexa": 22595, - "ĠBür": 22596, - "ĠAmer": 22597, - "Ġcope": 22598, - "Ġизв": 22599, - "jal": 22600, - "Ġ1995": 22601, - "apat": 22602, - "resse": 22603, - "ĠPrize": 22604, - "ĠClaire": 22605, - "ĠBrandon": 22606, - "Ġwszystko": 22607, - "Ġvalued": 22608, - "à¸Ļะ": 22609, - "Ġsect": 22610, - "Ġsecretly": 22611, - "Ġdiamonds": 22612, - "ĠEvan": 22613, - "ĠRPG": 22614, - "ãģ«ãģª": 22615, - "ĪëıĦ": 22616, - "ĠUniversal": 22617, - "Ġdoubts": 22618, - "ĠPin": 22619, - "wiÄħz": 22620, - "ļ©": 22621, - "Ġalbo": 22622, - "Ġbraucht": 22623, - "AUL": 22624, - "ĠMobile": 22625, - "grades": 22626, - "Ġschem": 22627, - "why": 22628, - "ĠNicht": 22629, - "pi": 22630, - "gle": 22631, - "Ġchorus": 22632, - "Ġgly": 22633, - "Ġreinforce": 22634, - "Ġmuff": 22635, - "ĠShen": 22636, 
- "ĠHola": 22637, - "Ñĥг": 22638, - "videmment": 22639, - "vial": 22640, - "acious": 22641, - "laimed": 22642, - "ĠRico": 22643, - "Ġvegg": 22644, - "Ġillustration": 22645, - "ĠButter": 22646, - "owad": 22647, - "Ġeux": 22648, - "Ġenfants": 22649, - "ĠLeader": 22650, - "ĠVillage": 22651, - "etically": 22652, - "ÙĨÙĬ": 22653, - "Ġstew": 22654, - "Ġsurprises": 22655, - "Ġcue": 22656, - "ĠGrandma": 22657, - "ĠCelsius": 22658, - "ĠRicht": 22659, - "enc": 22660, - "Ġpetition": 22661, - "Ġherb": 22662, - "Ġwicked": 22663, - "Ġschle": 22664, - "ocaly": 22665, - "Ġtransf": 22666, - "Ġtokens": 22667, - "ĠGray": 22668, - "ĠBBC": 22669, - "IK": 22670, - "Ġ1500": 22671, - "zn": 22672, - "ĠNev": 22673, - "Ġkoy": 22674, - "Ġzar": 22675, - "Ġbullshit": 22676, - "ĠColombia": 22677, - "ulative": 22678, - "Ġwidespread": 22679, - "yect": 22680, - "kit": 22681, - "Ġempresa": 22682, - "Ġnour": 22683, - "Ġburns": 22684, - "atin": 22685, - "aired": 22686, - "Ġrevolutionary": 22687, - "ĠгодÑĥ": 22688, - "ĠLogan": 22689, - "Ġ1996": 22690, - "ĠGraham": 22691, - "reb": 22692, - "ĠNHS": 22693, - "æľĽ": 22694, - "Ġcostumes": 22695, - "Ġnawet": 22696, - "Ġlovers": 22697, - "ĠLucy": 22698, - "ĠIndigenous": 22699, - "íķĺ기": 22700, - "Ġimmunity": 22701, - "¥´ë": 22702, - "uito": 22703, - "Ġexcessive": 22704, - "Ġdonations": 22705, - "Ġ×Ķר": 22706, - "Ġ첫": 22707, - "éīĦ": 22708, - "Ġdrying": 22709, - "melon": 22710, - "Ġsurveys": 22711, - "Ġ무ìĬ¨": 22712, - "風": 22713, - "aaa": 22714, - "Ġprobe": 22715, - "ancial": 22716, - "Ġlouder": 22717, - "Ġhotels": 22718, - "Ã¼ÄŁ": 22719, - "agner": 22720, - "Ġorigins": 22721, - "Ġë§Īì§Ģë§ī": 22722, - "Ġ**": 22723, - "Ġstrangers": 22724, - "ĠHaus": 22725, - "comed": 22726, - "Ġanthrop": 22727, - "Ġuso": 22728, - "ĠìķĦì§ģ": 22729, - "ĠYuan": 22730, - "ĠíķĦìļĶ": 22731, - "pler": 22732, - "ressive": 22733, - "Ġspraw": 22734, - "ĠStew": 22735, - "Ġ1994": 22736, - "Ġelders": 22737, - "Ġmeinen": 22738, - "Ġjunt": 22739, - "Ġacoust": 22740, - "ĠWohn": 22741, - "Ġbananas": 22742, - "Ġprojection": 22743, - "ĠStick": 22744, - "legt": 22745, - "speed": 22746, - "ĠcÅ©ng": 22747, - "ĠWort": 22748, - "ĠBaltimore": 22749, - "ĠÑĨел": 22750, - "Ġdunno": 22751, - "å¼·": 22752, - "?,": 22753, - "ãĥīãĥ³": 22754, - "ĠLocal": 22755, - "osto": 22756, - "ÐŃ": 22757, - "ода": 22758, - "ĠPortuguese": 22759, - "Ġtheirs": 22760, - "Ġdém": 22761, - "åı¦": 22762, - "Ġdrauf": 22763, - "ĠBuddhist": 22764, - "erta": 22765, - "Ge": 22766, - "Ġcarrot": 22767, - "ĠWonderful": 22768, - "Ġsoak": 22769, - "Ġchairman": 22770, - "ggi": 22771, - "ICA": 22772, - "fried": 22773, - "Ġflick": 22774, - "ĠThroughout": 22775, - "Ġìļ°ë": 22776, - "Ġcough": 22777, - "Ġfluffy": 22778, - "school": 22779, - "Ġripped": 22780, - "--------": 22781, - "ĠZukunft": 22782, - "Ġнеб": 22783, - "Ġsto": 22784, - "ĠBO": 22785, - "pent": 22786, - "ĠLawrence": 22787, - "ÏīÏĤ": 22788, - "sticks": 22789, - "ĠEins": 22790, - "ĠÑĢÑĭ": 22791, - "ĠStrong": 22792, - "Ġcaramel": 22793, - "Ġspite": 22794, - "azar": 22795, - "éĥ½æĺ¯": 22796, - "Ġcritically": 22797, - "Ġobra": 22798, - "owitz": 22799, - "ĠZone": 22800, - "ĠÑĢек": 22801, - "Ġsug": 22802, - "arded": 22803, - "Ġgì": 22804, - "ffentlich": 22805, - "anche": 22806, - "ØŁ": 22807, - "astically": 22808, - "ìĿ¼ë": 22809, - "лав": 22810, - "Ġsimplest": 22811, - "ĠFriend": 22812, - "Ġquello": 22813, - "Ġambition": 22814, - "Ġabbiamo": 22815, - "åºķ": 22816, - "ĠÑĦоÑĢм": 22817, - "ĠEssa": 22818, - "Ġeducators": 22819, - "Ġstatistical": 22820, - "éĢĻéĤĬ": 22821, - "Ġchanger": 22822, - "Ġatau": 22823, - 
"étais": 22824, - "ĠShakespeare": 22825, - "ëIJĺ": 22826, - "Ġtriggers": 22827, - "Ġrealiz": 22828, - "Ġcelui": 22829, - "wheel": 22830, - "Ġloyalty": 22831, - "Ġscreams": 22832, - "kehr": 22833, - "ĠMega": 22834, - "east": 22835, - "Ġtops": 22836, - "ĠTotally": 22837, - "ountain": 22838, - "lord": 22839, - "Ġviolation": 22840, - "ĠGA": 22841, - "Ġnicer": 22842, - "ĠFresh": 22843, - "ĠMelissa": 22844, - "function": 22845, - "Ġrape": 22846, - "Ġexceptions": 22847, - "Ġsilicon": 22848, - "Ġliberty": 22849, - "Ġhouseholds": 22850, - "ãģįãģ¾ãģĻ": 22851, - "ĠCA": 22852, - "ĠÐŀб": 22853, - "Ġlib": 22854, - "ŀĮ": 22855, - "cific": 22856, - "Ġtropical": 22857, - "Ġinvestigating": 22858, - "HD": 22859, - "Ġadapter": 22860, - "ĠPitt": 22861, - "ancia": 22862, - "ĠShell": 22863, - "friendly": 22864, - "Ġconclusions": 22865, - "Ġturtle": 22866, - "Ġdecomp": 22867, - "Ġanimations": 22868, - "ĠÑģек": 22869, - "insi": 22870, - "Ġretention": 22871, - "kie": 22872, - "Ġinjection": 22873, - "ĠMadison": 22874, - "ì°°": 22875, - "Ġvient": 22876, - "Ġvaried": 22877, - "Ġviolin": 22878, - "ĠBil": 22879, - "Ġluckily": 22880, - "Ġhtt": 22881, - "lä": 22882, - "Ġranch": 22883, - "çľĭçľĭ": 22884, - "Ġsólo": 22885, - "ìķħ": 22886, - "ĠDerek": 22887, - "ĠScripture": 22888, - "оÑĢа": 22889, - "Ġclassrooms": 22890, - "avil": 22891, - "formed": 22892, - "Ġbeforehand": 22893, - "ĠGem": 22894, - "prech": 22895, - "Ġlin": 22896, - "Ġgreens": 22897, - "ÑĨев": 22898, - "ĠMercedes": 22899, - "Ġdrought": 22900, - "gasps": 22901, - "Ġabortion": 22902, - "Ġterribly": 22903, - "Ġsposób": 22904, - "Ġsecured": 22905, - "Ġatrás": 22906, - "Ġwavelength": 22907, - "Ġgrains": 22908, - "ective": 22909, - "Ġspacecraft": 22910, - "Ġtours": 22911, - "Ġprofes": 22912, - "Ġsurgeon": 22913, - "ĠPie": 22914, - "Ġideally": 22915, - "arner": 22916, - "UP": 22917, - "opard": 22918, - "sce": 22919, - "Ġimmense": 22920, - "ĠOrt": 22921, - "roller": 22922, - "ĠDallas": 22923, - "ĠNicholas": 22924, - "Ġsulf": 22925, - "ĠToyota": 22926, - "Ġquantities": 22927, - "ceans": 22928, - "Ġcui": 22929, - "ança": 22930, - "ĠCAN": 22931, - "itzerland": 22932, - "åĦ¿": 22933, - "Ġzou": 22934, - "ĠCyber": 22935, - "legen": 22936, - "ĠInit": 22937, - "edu": 22938, - "Ġapert": 22939, - "Ġadjac": 22940, - "ouv": 22941, - "èĢĮä¸Ķ": 22942, - "rs": 22943, - "Ġcabbage": 22944, - "Ġwheelchair": 22945, - "inyl": 22946, - "ĠDynam": 22947, - "ĠìķĦëĭĪëĿ¼": 22948, - "Ġling": 22949, - "hl": 22950, - "ĠмогÑĥ": 22951, - "Ġcrisp": 22952, - "Ġmij": 22953, - "Ġdug": 22954, - "nin": 22955, - "Ġbloss": 22956, - "Ġbelonging": 22957, - "Ġloudly": 22958, - "Ġminerals": 22959, - "Ġconcluded": 22960, - "Ġsearched": 22961, - "96": 22962, - "ĠMeet": 22963, - "ĠSEO": 22964, - "ĠСк": 22965, - "ĠHob": 22966, - "otta": 22967, - "Ġpropaganda": 22968, - "Ġcinnamon": 22969, - "Ġhunter": 22970, - "Ġgemeins": 22971, - "Ġsculpture": 22972, - "ulsion": 22973, - "Ġväl": 22974, - "Ġmagazines": 22975, - "Ġcontroversy": 22976, - "ä¸Ģ樣": 22977, - "Ġsequences": 22978, - "ãģĦãĤĭ": 22979, - "ĠíļĮ": 22980, - "Ġdeleted": 22981, - "使": 22982, - "IJëıĦ": 22983, - "Ġvarying": 22984, - "ãĥĨ": 22985, - "Ġmounting": 22986, - "Ġaffair": 22987, - "Ġpathways": 22988, - "æ¦": 22989, - "Ġdigo": 22990, - "亮": 22991, - "Ġдок": 22992, - "Alex": 22993, - "Ġtobacco": 22994, - "ĠCV": 22995, - "Ġbothered": 22996, - "Ġambient": 22997, - "inky": 22998, - "ĠSL": 22999, - "Ġhates": 23000, - "Ġjeżeli": 23001, - "Ġcongreg": 23002, - "Ġelas": 23003, - "Ġdeuts": 23004, - "ĠStudios": 23005, - "chÄĻ": 23006, - "Ġdocumented": 23007, - 
"ĠCruz": 23008, - "ĠLen": 23009, - "ĠDouglas": 23010, - "ĠPortugal": 23011, - "enti": 23012, - "Ġspouse": 23013, - "Ġanalys": 23014, - "avia": 23015, - "Ġedited": 23016, - "Ġlại": 23017, - "built": 23018, - "Ġville": 23019, - "adora": 23020, - "Ġbracelet": 23021, - "Ġsushi": 23022, - "Ġpm": 23023, - "Ġtrails": 23024, - "Ġlug": 23025, - "Ġöver": 23026, - "Ġsorrow": 23027, - "Ġcolony": 23028, - "adox": 23029, - "Ġserie": 23030, - "anyak": 23031, - "ĠØ·": 23032, - "ĠGulf": 23033, - "æĺ¯ä¸įæĺ¯": 23034, - "ĠPV": 23035, - "ĠSamuel": 23036, - "ĠKit": 23037, - "ĠRal": 23038, - "ontin": 23039, - "expl": 23040, - "Ġentries": 23041, - "Ġactivists": 23042, - "Ps": 23043, - "Ġsant": 23044, - "ĠÑĤоÑĩ": 23045, - "ĠBruno": 23046, - "keley": 23047, - "Ġtutto": 23048, - "éĶ": 23049, - "Ġvintage": 23050, - "Ġterrified": 23051, - "ĠпоÑħ": 23052, - "usive": 23053, - "owers": 23054, - "айÑĤ": 23055, - "ëıĻ": 23056, - "Ġtwisted": 23057, - "ĠThought": 23058, - "Ġtah": 23059, - "Ġshrink": 23060, - "Ġsheer": 23061, - "lit": 23062, - "Ġdalam": 23063, - "Ġdib": 23064, - "Ġvard": 23065, - "owane": 23066, - "Ġdobr": 23067, - "ĠRena": 23068, - "ĠÑģвоÑİ": 23069, - "ĠpaÃŃses": 23070, - "ĠEra": 23071, - "ãģ®ãģ§": 23072, - "ĠBUT": 23073, - "sighs": 23074, - "Ġ그거": 23075, - "ĠgroÃŁen": 23076, - "Ġ빨리": 23077, - "Ġnerves": 23078, - "Ġconstit": 23079, - "Ġpreocup": 23080, - "ĠGay": 23081, - "ĠXu": 23082, - "keeper": 23083, - "heure": 23084, - "..)": 23085, - "ĠCalm": 23086, - "ĠUnidos": 23087, - "ĠìĿ´ê²ĥ": 23088, - "ĠAqui": 23089, - "ĠìłľìĿ¼": 23090, - "dır": 23091, - "ì¦ĺ": 23092, - "your": 23093, - "ĠÑįÑĤим": 23094, - "2020": 23095, - "Ġrund": 23096, - "ĠHO": 23097, - "ĠCatherine": 23098, - "ieli": 23099, - "Ġfusion": 23100, - "Ġideology": 23101, - "Ġforam": 23102, - "shaped": 23103, - "ĠíĽĦë": 23104, - "Ġwt": 23105, - "Ġretr": 23106, - "Ġpréc": 23107, - "Ġê°ij": 23108, - "Ġopenly": 23109, - "vity": 23110, - "구ìļĶ": 23111, - "Ġobstacle": 23112, - "Ġboo": 23113, - "Ġseiner": 23114, - "icorn": 23115, - "Ġeigenlijk": 23116, - "Ġheader": 23117, - "aremos": 23118, - "Ġsofter": 23119, - "ĠÐŁÐ¾Ð´": 23120, - "Ġprejud": 23121, - "Ġdefines": 23122, - "ierte": 23123, - "Ġblending": 23124, - "Ġbelievers": 23125, - "ĠWochen": 23126, - "Ġникак": 23127, - "ĠÐļогда": 23128, - "ĠTypically": 23129, - "Ġíģ¬": 23130, - "管": 23131, - "cios": 23132, - "Ġmissiles": 23133, - "Ġsponge": 23134, - "ĠKitchen": 23135, - "Ġtren": 23136, - "ningen": 23137, - "Ġscrap": 23138, - "Ġserait": 23139, - "´ìł": 23140, - "ç¹": 23141, - "Ġë°ĺë": 23142, - "Ġrestored": 23143, - "ĠprzykÅĤad": 23144, - "ĠKubernetes": 23145, - "Ġsait": 23146, - "Ġuw": 23147, - "Ġenabling": 23148, - "Ġtravers": 23149, - "amps": 23150, - "åıĹ": 23151, - "ĠOMG": 23152, - "ensor": 23153, - "Ġzosta": 23154, - "Ġpronounced": 23155, - "Ang": 23156, - "normal": 23157, - "Ġeconomies": 23158, - "tin": 23159, - "ĠChampion": 23160, - "izen": 23161, - "Ġarbeiten": 23162, - "ĠGospel": 23163, - "ĠZu": 23164, - "nga": 23165, - "Ġliteracy": 23166, - "ĠMans": 23167, - "Ġcirculation": 23168, - "Ġadap": 23169, - "ĠTotal": 23170, - "Ġmereka": 23171, - "Ġolacak": 23172, - "ÑģÑĤаÑĤи": 23173, - "Jack": 23174, - "Ġmund": 23175, - "Ġthief": 23176, - "bies": 23177, - "Ġê²ģ": 23178, - "aque": 23179, - "ĠÚ©ÛĮ": 23180, - "ĠScar": 23181, - "å²": 23182, - "Ġabol": 23183, - "Ġdevote": 23184, - "Ġ01": 23185, - "Ġsitten": 23186, - "ĠVisual": 23187, - "week": 23188, - "some": 23189, - "ingt": 23190, - "Ġjournalism": 23191, - "ĠHir": 23192, - "ĠBachelor": 23193, - "inery": 23194, - "ÃľND": 23195, - "ãĥŁ": 23196, - "ç»Ļ": 
23197, - "Ġcoloring": 23198, - "ĠCrist": 23199, - "Ġcelebrities": 23200, - "ĠÑĩиÑģ": 23201, - "ĠCrit": 23202, - "Ġdifferentiate": 23203, - "ĠÐľÐ½Ðµ": 23204, - "elim": 23205, - "Ġseafood": 23206, - "Ġalgumas": 23207, - "otherapy": 23208, - "æĪ°": 23209, - "Ġglaub": 23210, - "Ġarbitrary": 23211, - "gens": 23212, - "ĠбÑĥдем": 23213, - "Ġtav": 23214, - "Ġcreamy": 23215, - "ĠCountry": 23216, - "añ": 23217, - "меÑĤ": 23218, - "Ġhinter": 23219, - "Ġmism": 23220, - "Ġillustrate": 23221, - "ÃľNDNIS": 23222, - "Ġdecreasing": 23223, - "Ġweniger": 23224, - "AKI": 23225, - "ixon": 23226, - "Ġней": 23227, - "Ġfatto": 23228, - "Ġnerd": 23229, - "çł": 23230, - "Ġbitte": 23231, - "Per": 23232, - "Ġtane": 23233, - "Ġgöz": 23234, - "Ġforte": 23235, - "ĠEy": 23236, - "ĠнавеÑĢ": 23237, - "被": 23238, - "ĠWordPress": 23239, - "ĠMis": 23240, - "ů": 23241, - "zäh": 23242, - "Ġintéress": 23243, - "osaurs": 23244, - "ĠFalls": 23245, - "Ġnessa": 23246, - "97": 23247, - "Ġmuseums": 23248, - "Ġcorresponds": 23249, - "Ġsings": 23250, - "four": 23251, - "Ġeder": 23252, - "ĠCommunist": 23253, - "oa": 23254, - "nek": 23255, - "ĠWHO": 23256, - "Ġcorpo": 23257, - "Ġmessing": 23258, - "ÏĦαι": 23259, - "Ġbrushes": 23260, - "Ġbisc": 23261, - "ĠArbeits": 23262, - "ĠTax": 23263, - "Ġsele": 23264, - "Ġflags": 23265, - "oupe": 23266, - "Ġanticipated": 23267, - "ãĥij": 23268, - "ĠNad": 23269, - "Ġpoured": 23270, - "Ġml": 23271, - "Ġllama": 23272, - "Ġvisualize": 23273, - "Ġlisteners": 23274, - "ÙĦÙĥ": 23275, - "alten": 23276, - "Michael": 23277, - "Ġcosì": 23278, - "Õ¡Õ": 23279, - "opus": 23280, - "Ġíķ´ì£¼": 23281, - "Ġhike": 23282, - "ĠAttorney": 23283, - "ĠHillary": 23284, - "uded": 23285, - "Ġíķĺì§Ģë§Į": 23286, - "Ġdove": 23287, - "Ġstorms": 23288, - "акÑģ": 23289, - "Ġdoctrine": 23290, - "Ġhex": 23291, - "iks": 23292, - "noÅĽÄĩ": 23293, - "Ġscripts": 23294, - "Ġδεν": 23295, - "ĠÑįÑĤиÑħ": 23296, - "ĠÐĨ": 23297, - "aber": 23298, - "ĠVas": 23299, - "Ġcentimeters": 23300, - "×ŀ×Ķ": 23301, - "ниб": 23302, - "Ġriders": 23303, - "ĠTrib": 23304, - "åĮħ": 23305, - "Ġtakże": 23306, - "Ġnoun": 23307, - "Ġicons": 23308, - "Ġsolely": 23309, - "minded": 23310, - "Ġdispon": 23311, - "ĠSwitzerland": 23312, - "Ġclusters": 23313, - "Ġqueda": 23314, - "ailing": 23315, - "Ġmanga": 23316, - "Ġ68": 23317, - "ĦĪ": 23318, - "Ġtet": 23319, - "gins": 23320, - "haus": 23321, - "空": 23322, - "å·¥": 23323, - "ĠOP": 23324, - "oted": 23325, - "Ġnouveau": 23326, - "ALLY": 23327, - "ÙĪد": 23328, - "òn": 23329, - "Ġmortality": 23330, - "ĠGitHub": 23331, - "drop": 23332, - "Ġdisgu": 23333, - "Ġrecom": 23334, - "Ġlocals": 23335, - "Ġhomemade": 23336, - "amba": 23337, - "Ġpronunciation": 23338, - "Ġalphabet": 23339, - "анÑĮ": 23340, - "owany": 23341, - "iras": 23342, - "idency": 23343, - "OME": 23344, - "ĠÑĢаÑģÑģ": 23345, - "arak": 23346, - "viamente": 23347, - "Ġnonprofit": 23348, - "ĠYouTuber": 23349, - "Ġparenth": 23350, - "ĠBoo": 23351, - "vat": 23352, - "ĠStir": 23353, - "Ġprecip": 23354, - "Ġants": 23355, - "Ġally": 23356, - "ĠMaori": 23357, - "ĠëĮĢíķľ": 23358, - "åı¯æĺ¯": 23359, - "ogene": 23360, - "ĠLabour": 23361, - "arette": 23362, - "Ġrecycling": 23363, - "ensa": 23364, - "Ġpursuit": 23365, - "Ġsak": 23366, - "ĠÐĹдеÑģÑĮ": 23367, - "Ġtolerance": 23368, - "Ġsaat": 23369, - "Ġclicked": 23370, - "âĻ¥": 23371, - "Ġfacebook": 23372, - "ĠInto": 23373, - "Ġincentives": 23374, - "기ëĬĶ": 23375, - "ĠDennis": 23376, - "ĠWik": 23377, - "gesch": 23378, - "à¹Ģà¸Ľ": 23379, - "ĠÏĢα": 23380, - "ĠWhoo": 23381, - "Ġrounded": 23382, - "Ġdope": 23383, - "Ġcapturing": 23384, - 
"ĠWarri": 23385, - "Ġcivilian": 23386, - "Ġcharming": 23387, - "Ġesas": 23388, - "Ġsustained": 23389, - "Ġleaning": 23390, - "Ġabundance": 23391, - "ÃŃlia": 23392, - "алÑĮнÑĭй": 23393, - "Ġphải": 23394, - "acja": 23395, - "Ġê°ĻìķĦ": 23396, - "activ": 23397, - "าย": 23398, - "Ġ97": 23399, - "Ġмой": 23400, - "cro": 23401, - "ĠJackie": 23402, - "ittees": 23403, - "bracht": 23404, - "ulent": 23405, - "Ġìłľë": 23406, - "Ġplugin": 23407, - "vantage": 23408, - "party": 23409, - "Ġsuas": 23410, - "Ġante": 23411, - "Ñĥл": 23412, - "ÐĿÐIJ": 23413, - "æĤ¨": 23414, - "ĠÏĥÏħ": 23415, - "Ġmeth": 23416, - "Ġenthusiasm": 23417, - "ÑıÑĤÑģÑı": 23418, - "íĻĶë": 23419, - "Ġsynthetic": 23420, - "Ġseasoning": 23421, - "ĠLost": 23422, - "onomy": 23423, - "ĠSpark": 23424, - "Ġbure": 23425, - "Ġassured": 23426, - "Ġimagin": 23427, - "Ġcarro": 23428, - "Sha": 23429, - "Äħt": 23430, - "нÑĥÑĤÑĮ": 23431, - "ática": 23432, - "TY": 23433, - "Ġkern": 23434, - "ĠBrazilian": 23435, - "ð": 23436, - "Ġsuspended": 23437, - "ĠCarib": 23438, - "Ġbizim": 23439, - "ĠOliver": 23440, - "ãģ¶": 23441, - "Tom": 23442, - "Ġплан": 23443, - "Ġnope": 23444, - "omething": 23445, - "Ġbeiden": 23446, - "ÑĨен": 23447, - "Ġfluct": 23448, - "ĠμοÏħ": 23449, - "Ġfathers": 23450, - "ĠBlake": 23451, - "Ġupward": 23452, - "ĠDash": 23453, - "ĠLil": 23454, - "ĠìĪĺëıĦ": 23455, - "Ġrevelation": 23456, - "Ġelevated": 23457, - "ĠJiang": 23458, - "LED": 23459, - "ĠThompson": 23460, - "ĠмогÑĥÑĤ": 23461, - "ÑģÑĤÑĢÑĥ": 23462, - "ifiers": 23463, - "Ġcomeback": 23464, - "Ġbuyers": 23465, - "ê²°": 23466, - "ĠSales": 23467, - "иÑĩе": 23468, - "ciones": 23469, - "Ġwhistle": 23470, - "Ġdull": 23471, - "LEX": 23472, - "Ġíķĺê²łìĬµëĭĪëĭ¤": 23473, - "Ġcriminals": 23474, - "Ġdescent": 23475, - "ipple": 23476, - "ması": 23477, - "Ġfoolish": 23478, - "ĠдÑĥмаÑİ": 23479, - "tar": 23480, - "Ġmango": 23481, - "Ġchoreography": 23482, - "Matt": 23483, - "Ġterritor": 23484, - "Ġacaba": 23485, - "ĠEinstein": 23486, - "ĠIBM": 23487, - "ĠMetal": 23488, - "ĠCrystal": 23489, - "Ġrah": 23490, - "Ġfoul": 23491, - "ĠIslands": 23492, - "Ġintact": 23493, - "ĠRail": 23494, - ".:": 23495, - "Ġacá": 23496, - "ĠпÑĢоп": 23497, - "еÑĢе": 23498, - "ĠWrite": 23499, - "hehe": 23500, - "ĠFO": 23501, - "ĠÏĥÏĦη": 23502, - "Ġdoin": 23503, - "held": 23504, - "Ġappropriately": 23505, - "Ġdeliberately": 23506, - "Ġarchive": 23507, - "Ġgiveaway": 23508, - "ãģĵãģĵ": 23509, - "Ġfinale": 23510, - "лаÑģ": 23511, - "ено": 23512, - "Æ¡n": 23513, - "æ£Ĵ": 23514, - "ogo": 23515, - "çī©": 23516, - "ĠAudience": 23517, - "ãħł": 23518, - "Ġsubur": 23519, - "Ġheadache": 23520, - "аннÑı": 23521, - "ĠWitch": 23522, - "ĠSwedish": 23523, - "ĠBI": 23524, - "Ġerase": 23525, - "Ġkhi": 23526, - "Ġcommentary": 23527, - "ĠSultan": 23528, - "íĥĿ": 23529, - "ĠLeban": 23530, - "Ġë³´ìĭ": 23531, - "ĠPam": 23532, - "pekt": 23533, - "month": 23534, - "Ġgrounded": 23535, - "ê¾": 23536, - "ĠÅŁekilde": 23537, - "250": 23538, - "ĠSCH": 23539, - "ioso": 23540, - "Ġinaug": 23541, - "heimer": 23542, - "Ġreflecting": 23543, - "ĠRuth": 23544, - "ĠOil": 23545, - "Ġtrouver": 23546, - "uep": 23547, - "..]": 23548, - "ĠìŀĪë": 23549, - "Ġolha": 23550, - "Ġreasonably": 23551, - "Ġglitch": 23552, - "UB": 23553, - "ĠGran": 23554, - "Ġadalah": 23555, - "Ġlent": 23556, - "را": 23557, - "Ġtraction": 23558, - "Ġadjusting": 23559, - "´¤": 23560, - "нибÑĥдÑĮ": 23561, - "Ġдоп": 23562, - "Ġstretched": 23563, - "Ġort": 23564, - "Ġcosine": 23565, - "viol": 23566, - "Ġìħ": 23567, - "cir": 23568, - "Ġbastard": 23569, - "ä¸ĩ": 23570, - "ĠÑħод": 23571, - "Ġquier": 
23572, - "Ġpressures": 23573, - "ĠAnh": 23574, - "å¹¾": 23575, - "Ġelles": 23576, - "ĠдÑĢÑĥз": 23577, - "ĠможеÑĤе": 23578, - "Ġchá»": 23579, - "ĠMé": 23580, - "ök": 23581, - "ầu": 23582, - "ìłĪ": 23583, - "zin": 23584, - "Ġcaution": 23585, - "iban": 23586, - "Ġjudging": 23587, - "ÑĥÑİÑĤ": 23588, - "Ġbaj": 23589, - "ĠСейÑĩаÑģ": 23590, - "ĠPoor": 23591, - "ĠNazi": 23592, - "Ġupbeat": 23593, - "yang": 23594, - "Ġweekends": 23595, - "ĠEssentially": 23596, - "Ġoluyor": 23597, - "Ġspatial": 23598, - "acker": 23599, - "Ġseller": 23600, - "Ġ×IJ×ķת": 23601, - "ij׾": 23602, - "Ġvivid": 23603, - "ĠBond": 23604, - "ê¶Į": 23605, - "iskt": 23606, - "ãĤµ": 23607, - "Ġgoat": 23608, - "driver": 23609, - "Ġmug": 23610, - "ictional": 23611, - "Ġallt": 23612, - "ĠIniti": 23613, - "ĠRand": 23614, - "Ġfinishes": 23615, - "Ġê°Ī": 23616, - "Ġvitam": 23617, - "Ġteenagers": 23618, - "ĠMorris": 23619, - "ì¤Ħ": 23620, - "ĠOri": 23621, - "iya": 23622, - "Ġmyös": 23623, - "Step": 23624, - "ĠKre": 23625, - "辦": 23626, - "Ġdinosaur": 23627, - "Ġëªĩ": 23628, - "affe": 23629, - "ĠëIJ©ëĭĪëĭ¤": 23630, - "Ġzeg": 23631, - "åĪĩ": 23632, - "ĠManhattan": 23633, - "Ġsujet": 23634, - "uelle": 23635, - "stoff": 23636, - "Ġdür": 23637, - "Ġsubmar": 23638, - "eses": 23639, - "Ġaquele": 23640, - "Ġnou": 23641, - "ĠFaith": 23642, - "tz": 23643, - "ĠÑĤомÑĥ": 23644, - "aceut": 23645, - "liers": 23646, - "Ġbandwidth": 23647, - "Æ°á»Ŀ": 23648, - "Ġrespective": 23649, - "ĠAve": 23650, - "Ġspreadshe": 23651, - "ĠSent": 23652, - "icamente": 23653, - "Ġinfra": 23654, - "Ġlearners": 23655, - "Ġà®ī": 23656, - "aiah": 23657, - "renal": 23658, - "Ġmustard": 23659, - "Ġhabt": 23660, - "çĥ": 23661, - "ĠQué": 23662, - "Ġanalyzing": 23663, - "æ¯ı": 23664, - "Ġsolic": 23665, - "Ġ×Ķ×ķ×IJ": 23666, - "Ġcausa": 23667, - "Ġwelcomed": 23668, - "ĠSuccess": 23669, - "Ġfacile": 23670, - "ĠÐŁÐ¾ÑĤомÑĥ": 23671, - "schein": 23672, - "Ġfetch": 23673, - "Ġstrat": 23674, - "ĠÑģÑĤоиÑĤ": 23675, - "ìĹIJìĦľëĬĶ": 23676, - "ĠÑģпоÑģоб": 23677, - "mam": 23678, - "ĠserÃŃa": 23679, - "naments": 23680, - "writer": 23681, - "Ġconsulting": 23682, - "íĺĢ": 23683, - "ĠBerkeley": 23684, - "eu": 23685, - "asive": 23686, - "UU": 23687, - "ĠAnalyt": 23688, - "Ġsubmission": 23689, - "Ġmagnificent": 23690, - "enza": 23691, - "Ġecon": 23692, - "Ġprofiles": 23693, - "Ġincar": 23694, - "Ab": 23695, - "ĠNun": 23696, - "Ġhic": 23697, - "screaming": 23698, - "Ġresilient": 23699, - "åĪ©": 23700, - "grund": 23701, - "Ġconcur": 23702, - "Ġbereits": 23703, - "LD": 23704, - "Ġnurt": 23705, - "ìī": 23706, - "Ġfeast": 23707, - "Ġencuent": 23708, - "ĠMichel": 23709, - "Ġsuprem": 23710, - "\"]": 23711, - "Ġfeeds": 23712, - "ĠKollegen": 23713, - "isser": 23714, - "ĠFeng": 23715, - "ĠWen": 23716, - "mun": 23717, - "ĠtenÃŃa": 23718, - "ĠWrest": 23719, - "Ġìĺ¤ëĬĺìĿĢ": 23720, - "Ġstead": 23721, - "Ġrestoration": 23722, - "Ġdonated": 23723, - "Ġdels": 23724, - "Ġcensus": 23725, - "Ġdesperately": 23726, - "worthy": 23727, - "HE": 23728, - "ĠSpa": 23729, - "ĠBryan": 23730, - "Ġhj": 23731, - "ĠRaw": 23732, - "ìķĦë": 23733, - "ĠCamera": 23734, - "Ġzien": 23735, - "Ġstyl": 23736, - "ĠTW": 23737, - "ĠCheese": 23738, - "borne": 23739, - "Ġobl": 23740, - "ĠAlready": 23741, - "Ġunstable": 23742, - "Ġflames": 23743, - "post": 23744, - "Ha": 23745, - "romagn": 23746, - "ĠìĹĦë§Ī": 23747, - "dest": 23748, - "Ġkolej": 23749, - "Ġtemporarily": 23750, - "Ġdetermining": 23751, - "ĠGlass": 23752, - "ÑĢон": 23753, - "olan": 23754, - "Ġdominated": 23755, - "åĮĸ": 23756, - "____": 23757, - "ĠÙĩذا": 23758, - "ĠDana": 23759, - 
"Ġdinheiro": 23760, - "aqu": 23761, - "민": 23762, - "ĠÃłs": 23763, - "ĠJoey": 23764, - "ĠGriff": 23765, - "Ġattain": 23766, - "Ġtransitions": 23767, - "ĠLiterally": 23768, - "енд": 23769, - "ĠHaven": 23770, - "Ġgrabbing": 23771, - "Ġcrystals": 23772, - "ĠFourth": 23773, - "Ġcandles": 23774, - "ĠÑģлÑĥÑĩа": 23775, - "rico": 23776, - "Ġ5000": 23777, - "etto": 23778, - "Ġundo": 23779, - "Ġkto": 23780, - "Ġdivert": 23781, - "Ġchir": 23782, - "Ġpersec": 23783, - "Ġhiking": 23784, - "Ġannouncements": 23785, - "çĶ±": 23786, - "зÑĭ": 23787, - "Ġauc": 23788, - "Ġsystemic": 23789, - "ĠRM": 23790, - "Ïĥα": 23791, - "ĠÐĶж": 23792, - "Ġyar": 23793, - "ĠWard": 23794, - "Ġpissed": 23795, - "Ġcarn": 23796, - "Ġautonomous": 23797, - "ãħİãħİ": 23798, - "sover": 23799, - "æ²ĴéĮ¯": 23800, - "å¾Ī好": 23801, - "Ġreflex": 23802, - "Ġgardens": 23803, - "Ġdated": 23804, - "ì±": 23805, - "amiÄĻ": 23806, - "Ġcontinuity": 23807, - "Ġcitizenship": 23808, - "Ġschwer": 23809, - "Ġzak": 23810, - "table": 23811, - "ĠÑģÑĩ": 23812, - "è§ģ": 23813, - "ĠÏĥε": 23814, - "Ġgenerates": 23815, - "구ëĤĺ": 23816, - "öh": 23817, - "óm": 23818, - "alam": 23819, - "ĠJUDY": 23820, - "ĠBug": 23821, - "Ġãģ¦": 23822, - "Ġdrones": 23823, - "Ġágua": 23824, - "acaks": 23825, - "æļ": 23826, - "ĠÐļон": 23827, - "×ĸ×Ķ": 23828, - "Ġstrive": 23829, - "ĠAltern": 23830, - "Ġnearest": 23831, - "Ġproyect": 23832, - "tera": 23833, - "ĠASHLEY": 23834, - "Ġworm": 23835, - "Ġreplay": 23836, - "Ġtara": 23837, - "ĠIndians": 23838, - "ãĤ°": 23839, - "icaid": 23840, - "ĠìĪľ": 23841, - "Ġappealing": 23842, - "ĠWes": 23843, - "Ġmentions": 23844, - "Ġделе": 23845, - "Ġkw": 23846, - "Ġfragile": 23847, - "isz": 23848, - "ków": 23849, - "hang": 23850, - "color": 23851, - "Ġpresidente": 23852, - "87": 23853, - "еÑĦ": 23854, - "çĪ¸": 23855, - "Ġдобав": 23856, - "ĠNelson": 23857, - "áfic": 23858, - "ĠMICHAEL": 23859, - "Ġmechanic": 23860, - "Ġmetres": 23861, - "ĠoczywiÅĽcie": 23862, - "ĠCind": 23863, - "ĠogsÃ¥": 23864, - "Ġlandsca": 23865, - "ACE": 23866, - "Ġheadlines": 23867, - "Ġcatalyst": 23868, - "ĠCatch": 23869, - "inkles": 23870, - "Ġpills": 23871, - "ordo": 23872, - "Ġimmigrant": 23873, - "Ġexamination": 23874, - "Ġaccidents": 23875, - "zÄħd": 23876, - "Ġquiere": 23877, - "Ġnella": 23878, - "Ġ67": 23879, - "Ġpassa": 23880, - "Ġsuperfic": 23881, - "istor": 23882, - "Ġnov": 23883, - "ëĭµ": 23884, - "Ġmandate": 23885, - "isons": 23886, - "ĠVirtual": 23887, - "Ġselber": 23888, - "Ġcounseling": 23889, - "ĠNBA": 23890, - "Ġsept": 23891, - "Ġbeliever": 23892, - "Ġmarvel": 23893, - "ĠIntegr": 23894, - "ĠмÑĸ": 23895, - "Ġorph": 23896, - "Ġbackward": 23897, - "ĠGeneration": 23898, - "ĠPict": 23899, - "ĠÑĤоÑĤ": 23900, - "Ġtapi": 23901, - "prochen": 23902, - "Ġhallway": 23903, - "hte": 23904, - "ĠÛģÛĴ": 23905, - "ĠZum": 23906, - "èĢģ師": 23907, - "achment": 23908, - "iquer": 23909, - "folg": 23910, - "ĠEddie": 23911, - "ĠKil": 23912, - "Ġwellness": 23913, - "stock": 23914, - "è¼ĥ": 23915, - "Ġkaç": 23916, - "Ġterrorism": 23917, - "Ġpointer": 23918, - "Of": 23919, - "heric": 23920, - "ĠUltimately": 23921, - "Ġmeses": 23922, - "ĠTrade": 23923, - "Ġpint": 23924, - "Ġtuition": 23925, - "Ġdisagre": 23926, - "Ġê²ĮìŀĦ": 23927, - "Ġmanuscript": 23928, - "Ġroomm": 23929, - "Ġoutputs": 23930, - "еÑĨи": 23931, - "Ġries": 23932, - "Ġsalud": 23933, - "otzdem": 23934, - "Ġmasses": 23935, - "ĠbyÅĤa": 23936, - "Ġclearing": 23937, - "Ġdiscourse": 23938, - "atson": 23939, - "Ġfolded": 23940, - "ĠJar": 23941, - "ÙĦÙī": 23942, - "900": 23943, - "ĠÑĥÑģп": 23944, - "Ġprophecy": 23945, - 
"Ġinterfere": 23946, - "иÑħод": 23947, - "à¹Į": 23948, - "Ġthri": 23949, - "Ġ×ŀש": 23950, - "Ġlazım": 23951, - "Ġ1992": 23952, - "Ġfuturo": 23953, - "Ġlocking": 23954, - "Ġembargo": 23955, - "ĠNeither": 23956, - "ivamente": 23957, - "ĠmÃ¥ste": 23958, - "Ġmik": 23959, - "Ġcollector": 23960, - "екоÑĤоÑĢ": 23961, - "ĠGand": 23962, - "Ġsentir": 23963, - "ĠMight": 23964, - "å¡Ķ": 23965, - "Ġganzen": 23966, - "UC": 23967, - "Ġrelating": 23968, - "SD": 23969, - "Ġmosquito": 23970, - "GR": 23971, - "Ġhollow": 23972, - "âĺħ": 23973, - "ĠWalker": 23974, - "Ġaffiliate": 23975, - "Ġduplicate": 23976, - "нем": 23977, - "Ġgrape": 23978, - "ĠOrganization": 23979, - "Ġsynt": 23980, - "Joe": 23981, - "Ġgeg": 23982, - "Ġrevealing": 23983, - "ĠEthan": 23984, - "outer": 23985, - "Ġyay": 23986, - "é«Ķ": 23987, - "лаÑĢ": 23988, - "Ġreportedly": 23989, - "Ġihrer": 23990, - "Ġrecognise": 23991, - "Ġbumper": 23992, - "ĠRandy": 23993, - "ĠVenus": 23994, - "tles": 23995, - "Ġappetite": 23996, - "Ġglucose": 23997, - "Ġchodzi": 23998, - "ĠFurthermore": 23999, - "tir": 24000, - "Ġconta": 24001, - "Ġintuition": 24002, - "Ġaltitude": 24003, - "Ġchunks": 24004, - "ĠJoshua": 24005, - "ıģım": 24006, - "rylic": 24007, - "leans": 24008, - "ĠíĶ¼ë": 24009, - "LL": 24010, - "Que": 24011, - "Ġgor": 24012, - "ĠзнаÑĩиÑĤ": 24013, - "Ġpoems": 24014, - "Ġexcel": 24015, - "Ġexplored": 24016, - "Ġpopul": 24017, - "Ġincluso": 24018, - "stä": 24019, - "ĠGavin": 24020, - "alling": 24021, - "ĠÏĦον": 24022, - "é©": 24023, - "arbeit": 24024, - "ĠGas": 24025, - "Ġglorious": 24026, - "rieben": 24027, - "Ġspam": 24028, - "Ġindoor": 24029, - "Ġthrust": 24030, - "ĠAld": 24031, - "ĠPrior": 24032, - "Ġonboard": 24033, - "ãģłãģķãģĦ": 24034, - "oca": 24035, - "ASH": 24036, - "£ł": 24037, - "ĠChristine": 24038, - "Ġdrawer": 24039, - "Ġnoon": 24040, - "Ġìŀĺë": 24041, - "Ġpermanently": 24042, - "æ·±": 24043, - "ĠнапÑĢимеÑĢ": 24044, - "Ġpodcasts": 24045, - "erapeut": 24046, - "prit": 24047, - "Ġstainless": 24048, - "ĠÚ©ÛĴ": 24049, - "Ġfamilia": 24050, - "ĠÑĢазÑĢ": 24051, - "unto": 24052, - "ĠÑģÑĤол": 24053, - "Ġhä": 24054, - "ĠHai": 24055, - "ĠPB": 24056, - "izon": 24057, - "Ġkonnte": 24058, - "Ġbüyük": 24059, - "Ġutilizar": 24060, - "ÚĨ": 24061, - "Ġaquesta": 24062, - "Ġmixer": 24063, - "udent": 24064, - "лекÑģ": 24065, - "ÅĤu": 24066, - "ĠÑģиÑģÑĤем": 24067, - "ĠноÑĢм": 24068, - "Ġfatal": 24069, - "Ġconsiderations": 24070, - "Ġvalidation": 24071, - "Ġoli": 24072, - "ĠkardeÅŁ": 24073, - "ĠGLORIA": 24074, - "Ġpall": 24075, - "еÑģÑĤе": 24076, - "Ġrectang": 24077, - "Ġmedieval": 24078, - "allahi": 24079, - "asti": 24080, - "ĠSyrian": 24081, - "Ġshear": 24082, - "Ġdebug": 24083, - "ĠMai": 24084, - "Ġknocking": 24085, - "ĠLex": 24086, - "ardan": 24087, - "rov": 24088, - "Ġmemorial": 24089, - "æ°£": 24090, - "ooky": 24091, - "Ġstuffed": 24092, - "Ġpassé": 24093, - "Ġwig": 24094, - "Ĥł": 24095, - "Ġpróxima": 24096, - "Ġ1991": 24097, - "ĠмеждÑĥ": 24098, - "Ġnuestros": 24099, - "ĠBeast": 24100, - "Ġsmo": 24101, - "atched": 24102, - "ologia": 24103, - "Ġмод": 24104, - "Ġgee": 24105, - "Ġconceptual": 24106, - "Ġô": 24107, - "Ġdecreases": 24108, - "Ġqueries": 24109, - "олÑĮÑĪ": 24110, - "ĠApart": 24111, - "Ġexempl": 24112, - "å±±": 24113, - "Ġfled": 24114, - "ĠOFF": 24115, - "ggak": 24116, - "Ġbead": 24117, - "hir": 24118, - "lies": 24119, - "ĠClearly": 24120, - "ılar": 24121, - "Ġchess": 24122, - "Ġwhichever": 24123, - "Ġ96": 24124, - "ằ": 24125, - "Ġrespects": 24126, - "ĠмоÑĢ": 24127, - "Ġorganism": 24128, - "Ġgrandpa": 24129, - "ĠVie": 24130, - "è·Łä½ł": 24131, - 
"Ġflooding": 24132, - "Ġupgraded": 24133, - "ÑijÑĢ": 24134, - "Ġcheeks": 24135, - "Ġconquer": 24136, - "Ġstubborn": 24137, - "Ġpuzzles": 24138, - "Ġauction": 24139, - "Ġrelying": 24140, - "ĠPROF": 24141, - "ĠEsper": 24142, - "ĠÐľÐ£": 24143, - "Ġhype": 24144, - "Ġpossibil": 24145, - "Ġimprison": 24146, - "ĠErn": 24147, - "ìĹĪìĬµëĭĪëĭ¤": 24148, - "Ġenvie": 24149, - "Ġresurrection": 24150, - "ä¸įè¡Į": 24151, - "Ġsper": 24152, - "ĠVenezuela": 24153, - "som": 24154, - "Ġìŀłê¹": 24155, - "Ġnouvelle": 24156, - "Ġcloses": 24157, - "Ġ1940": 24158, - "Ġqua": 24159, - "ĠJared": 24160, - "ĠPir": 24161, - "Ġinde": 24162, - "Ġscrub": 24163, - "uku": 24164, - "Ġrequiring": 24165, - "Ġвами": 24166, - "Ġconsiderable": 24167, - "åIJĽ": 24168, - "ilia": 24169, - "Ġinne": 24170, - "Ġmeinem": 24171, - "Ġhardship": 24172, - "Ġtraps": 24173, - "roc": 24174, - "ĠìĦ¤ë": 24175, - "Ġresearching": 24176, - "ĠMargaret": 24177, - "Ġpenny": 24178, - "Ġbırak": 24179, - "Ñijл": 24180, - "Ġwool": 24181, - "Ġrhet": 24182, - "Ġflatten": 24183, - "çĩ": 24184, - "à¹Ģร": 24185, - "Ġpied": 24186, - "ĠChap": 24187, - "Ġunderm": 24188, - "Ġfret": 24189, - "Ġcrashed": 24190, - "ĠFrauen": 24191, - "Ø°Ùĩ": 24192, - "ivan": 24193, - "Ġliterary": 24194, - "latego": 24195, - "Ġspäter": 24196, - "Ġsimilarities": 24197, - "âĨ": 24198, - "ĠCoron": 24199, - "ĠCreek": 24200, - "Ġbosses": 24201, - "Ġaccompanied": 24202, - "Ġdebates": 24203, - "Ġassembled": 24204, - "ĠÃģ": 24205, - "ĠVai": 24206, - "Ġtract": 24207, - "Ġsimplement": 24208, - "ĠArin": 24209, - "Ġvulnerability": 24210, - "Ġhormone": 24211, - "IEL": 24212, - "OOK": 24213, - "Ġrelay": 24214, - "ĠAndrea": 24215, - "ril": 24216, - "Ġnecessity": 24217, - "aceutical": 24218, - "ÑİÑī": 24219, - "ousing": 24220, - "nahmen": 24221, - "Ġfootprint": 24222, - "map": 24223, - "ĠTier": 24224, - "annya": 24225, - "intend": 24226, - "åĸ®": 24227, - "å¢": 24228, - "Ġdecorate": 24229, - "Ġzombies": 24230, - "ĠHyd": 24231, - "ĠSuz": 24232, - "Ġcampuses": 24233, - "ĠEmb": 24234, - "Ġthrottle": 24235, - "Ġadmin": 24236, - "Ġoportun": 24237, - "Ġmirrors": 24238, - "Ġidentities": 24239, - "ĠClin": 24240, - "Ġë¹Ħë": 24241, - "á¹£": 24242, - "ĠOtt": 24243, - "Ġblues": 24244, - "Ġimpressions": 24245, - "-,": 24246, - "Ġvague": 24247, - "afe": 24248, - "Ġinferior": 24249, - "erald": 24250, - "Ġmedicines": 24251, - "Ġpregunta": 24252, - "osely": 24253, - "Ġtélé": 24254, - "ĠMonth": 24255, - "ĠLeaders": 24256, - "ĠEgyptian": 24257, - "Ġration": 24258, - "kers": 24259, - "heits": 24260, - "Ġrecht": 24261, - "Play": 24262, - "Ġeg": 24263, - "Ġpolls": 24264, - "ĠWOODR": 24265, - "Ġslots": 24266, - "jam": 24267, - "Both": 24268, - "ĠRat": 24269, - "ÑĢаж": 24270, - "ĠBright": 24271, - "ä¸Ģå®ļ": 24272, - "á»iji": 24273, - "urious": 24274, - "Ġsingers": 24275, - "Ġlogin": 24276, - "Ġtêm": 24277, - "lation": 24278, - "ĠMum": 24279, - "Æ°á»Ŀng": 24280, - "ĠEditor": 24281, - "åIJij": 24282, - "Ġinnovations": 24283, - "have": 24284, - "ĠSek": 24285, - "Ġweaker": 24286, - "ĠGob": 24287, - "After": 24288, - "´ì§Ģ": 24289, - "Ġë¬¸ìłľ": 24290, - "ãĥ¼ãĥ¼": 24291, - "Ġdisadvantage": 24292, - "確": 24293, - "Ġgaze": 24294, - "ĠMack": 24295, - "Ïģί": 24296, - "ĠKiss": 24297, - "ĠHolo": 24298, - "ĠBirth": 24299, - "izi": 24300, - "bab": 24301, - "ä¿Ŀ": 24302, - "ìĭľê³ł": 24303, - "деÑĢж": 24304, - "Ġsquat": 24305, - "кÑĥÑģ": 24306, - "uni": 24307, - "ĠComme": 24308, - "ĠWOODRUFF": 24309, - "ĠChampionship": 24310, - "Ġwelche": 24311, - "ĠYouth": 24312, - "zem": 24313, - "Ġodpow": 24314, - "Ġpersistent": 24315, - "rut": 24316, - 
"ìĶ©": 24317, - "íĸ¥": 24318, - "lair": 24319, - "iku": 24320, - "Ġvendor": 24321, - "Ġchúng": 24322, - "Ġfinanci": 24323, - "Ġoverly": 24324, - "âu": 24325, - "Ġgluten": 24326, - "Ġ1800": 24327, - "Ġdivisions": 24328, - "Ġciudad": 24329, - "Ġobed": 24330, - "Ġwarum": 24331, - "Ġeher": 24332, - "Ġelim": 24333, - "ĠÐĴо": 24334, - "Ġpeuvent": 24335, - "ĠWanna": 24336, - "Ġattendance": 24337, - "Ġassessments": 24338, - "ĠBog": 24339, - "Ġimagery": 24340, - "Ġcollectively": 24341, - "Ġinformal": 24342, - "ĠSchwe": 24343, - "Ġdeutlich": 24344, - "ĠChel": 24345, - "ĠPE": 24346, - "owed": 24347, - "Ġbanner": 24348, - "Ġshelves": 24349, - "ĠReturn": 24350, - "æĭ¿": 24351, - "LAUGHS": 24352, - "Ġcongratulate": 24353, - "ĠNorway": 24354, - "Ġdwell": 24355, - "ĠCaribbean": 24356, - "Ġnorms": 24357, - "ĠAnimal": 24358, - "ĠValentine": 24359, - "Ġextending": 24360, - "ĠVou": 24361, - "orr": 24362, - "ĠCheng": 24363, - "¡": 24364, - "ĠдоÑĢог": 24365, - "Ġveg": 24366, - "ĠhÃ¥": 24367, - "ĠXin": 24368, - "Ġì¹´ë": 24369, - "emet": 24370, - "Ġhypoth": 24371, - "Ġinteressante": 24372, - "rices": 24373, - "IZ": 24374, - "ĠUSD": 24375, - "Ġrunner": 24376, - "ĠBag": 24377, - "Ġê½": 24378, - "Ġcomeçar": 24379, - "Ġpigs": 24380, - "Ġweaknesses": 24381, - "Ph": 24382, - "ĠViol": 24383, - "ä¸įçĶ¨": 24384, - "Ġdragging": 24385, - "ĠAquÃŃ": 24386, - "ĠCSS": 24387, - "Ġmillimeters": 24388, - "Ġestás": 24389, - "Ġacute": 24390, - "Ġdejar": 24391, - "iÄŁ": 24392, - "obra": 24393, - "Love": 24394, - "Ġsilk": 24395, - "****": 24396, - "Ġjoins": 24397, - "Ġprol": 24398, - "Ġê°IJìĤ¬íķ©ëĭĪëĭ¤": 24399, - "æĶ¯": 24400, - "ØŃد": 24401, - "aghetti": 24402, - "änner": 24403, - "Ġstrang": 24404, - "Ġdoubled": 24405, - "Ġdescriptions": 24406, - "Ġstellen": 24407, - "Ġparti": 24408, - "ç«ĭ": 24409, - "²Ħë": 24410, - "ĠÃ¶ÄŁ": 24411, - "ighing": 24412, - "Ġangular": 24413, - "Ġnatuur": 24414, - "ĠShel": 24415, - "Æ°Æ¡": 24416, - "Ġrays": 24417, - "Ġseper": 24418, - "start": 24419, - "vised": 24420, - "Ġrushed": 24421, - "Ġinternationally": 24422, - "Ġnivel": 24423, - "Ġboxing": 24424, - "fallen": 24425, - "á»ijc": 24426, - "Ġseinen": 24427, - "plicity": 24428, - "Ġcarboh": 24429, - "ĠTravis": 24430, - "uso": 24431, - "ĠPhase": 24432, - "Ġactivation": 24433, - "Ġopio": 24434, - "·¨": 24435, - "Ġdecreased": 24436, - "Car": 24437, - "Ġbundle": 24438, - "Ġexpend": 24439, - "ormal": 24440, - "Ġadjacent": 24441, - "Ġmee": 24442, - "ĠоÑĢг": 24443, - "Ġtranscript": 24444, - "ĠLanguage": 24445, - "GS": 24446, - "è§ī": 24447, - "Ġseul": 24448, - "Ãłnh": 24449, - "Ġnya": 24450, - "nings": 24451, - "Ġìĭľë": 24452, - "ĠëĶ°ëĿ¼": 24453, - "ĠAgr": 24454, - "ÃŃd": 24455, - "çķĻ": 24456, - "Ġaby": 24457, - "ĠNeo": 24458, - "ıyoruz": 24459, - "ĠThinking": 24460, - "aime": 24461, - "Ġvite": 24462, - "Ġtravés": 24463, - "Ġ×ij×¢": 24464, - "Ġмед": 24465, - "Our": 24466, - "hoot": 24467, - "Ġliner": 24468, - "ĠPizza": 24469, - "Ġhyg": 24470, - "flies": 24471, - "ĠContinue": 24472, - "Ġdental": 24473, - "ĠTib": 24474, - "Ġregulate": 24475, - "lieÃŁ": 24476, - "ALK": 24477, - "ĠTae": 24478, - "길": 24479, - "ĠBrexit": 24480, - "ĠGut": 24481, - "Ġoccupation": 24482, - "Ġzrobi": 24483, - "âm": 24484, - "Ġwhisk": 24485, - "ä¸ĸçķĮ": 24486, - "Ġkanske": 24487, - "omon": 24488, - "robe": 24489, - "Ġwarfare": 24490, - "Ġthá»ĥ": 24491, - "Ġjaki": 24492, - "Ġstrokes": 24493, - "Ġpeas": 24494, - "ĠDamit": 24495, - "HAN": 24496, - "Ġinterference": 24497, - "ĠминÑĥÑĤ": 24498, - "NER": 24499, - "outing": 24500, - "Ġtextures": 24501, - "Łī": 24502, - "owi": 24503, - "ĠíķĻ": 
24504, - "Ġdens": 24505, - "Ġprotagonist": 24506, - "änn": 24507, - "Ġgoddess": 24508, - "Ġwollte": 24509, - "ijo": 24510, - "ĠWoche": 24511, - "ĠVPN": 24512, - "story": 24513, - "Ġkinderg": 24514, - "Ġfunnel": 24515, - "Ġdistress": 24516, - "ноÑģÑĤÑĮÑİ": 24517, - "Ġnoisy": 24518, - "ĠпÑĢодолж": 24519, - "Ġdaran": 24520, - "Ġenzyme": 24521, - "лож": 24522, - "Ġmute": 24523, - "Ġdwar": 24524, - "Ġاس": 24525, - "Ġkompl": 24526, - "Ġmerit": 24527, - "Ġfosse": 24528, - "ĠDrink": 24529, - "Ġfora": 24530, - "Ġwohl": 24531, - "Ġbreeze": 24532, - "Ġsanit": 24533, - "Ġdrin": 24534, - "ĠìĿ´ê±°ëĬĶ": 24535, - "Ġ62": 24536, - "Ġì°¨ë": 24537, - "abytes": 24538, - "Ġdeeds": 24539, - "Ġй": 24540, - "ième": 24541, - "iggling": 24542, - "Ġ\"'": 24543, - "ĠÑĩаÑģÑĤÑĮ": 24544, - "ĠAnswer": 24545, - "Ġevangel": 24546, - "Ġ1080": 24547, - "ĠVisit": 24548, - "icient": 24549, - "Ġreliability": 24550, - "ÑİÑģÑĮ": 24551, - "ĠEarlier": 24552, - "Ġfid": 24553, - "çŃīä¸Ģä¸ĭ": 24554, - "Ġsleeves": 24555, - "iyorsun": 24556, - "Ġbib": 24557, - "ĠAccount": 24558, - "Ñıли": 24559, - "ciplinary": 24560, - "zas": 24561, - "ĠбеÑĢ": 24562, - "Ġnecklace": 24563, - "Ġblender": 24564, - "ĠPhillips": 24565, - "eti": 24566, - "ĠJupiter": 24567, - "Ġprovoc": 24568, - "ĠYears": 24569, - "entre": 24570, - "acio": 24571, - "Ġkü": 24572, - "Ġantenna": 24573, - "Ġnovels": 24574, - "Ġfart": 24575, - "ĠSugar": 24576, - "ĠJudy": 24577, - "Ġcollapsed": 24578, - "ç°": 24579, - "ritis": 24580, - "ĠìĥģíĻ©": 24581, - "ÐĹЫ": 24582, - "ĠVerf": 24583, - "ranean": 24584, - "ereum": 24585, - "ĠTarget": 24586, - "Ġ88": 24587, - "ĠÐĺз": 24588, - "ideo": 24589, - "Ġregression": 24590, - "ì¶ľ": 24591, - "Ġmówi": 24592, - "Ġstudios": 24593, - "iens": 24594, - "iph": 24595, - "Ġfrying": 24596, - "Ġfascinated": 24597, - "ĠWah": 24598, - "bucks": 24599, - "maya": 24600, - "ĠSaturn": 24601, - "ĠMommy": 24602, - "Ġratings": 24603, - "Ġautumn": 24604, - "Æ°Æ¡ng": 24605, - "Ġloser": 24606, - "Ġcentro": 24607, - "érieur": 24608, - "ĠFold": 24609, - "Ġsupervisor": 24610, - "ĠNobel": 24611, - "Ġunderest": 24612, - "obia": 24613, - "ĠвÑģÑı": 24614, - "Ġverw": 24615, - "Ġfuels": 24616, - "Ġartifacts": 24617, - "Ġë¶Ļ": 24618, - "ĠAutom": 24619, - "çļĦæĺ¯": 24620, - "ÛĶ": 24621, - "×ķס": 24622, - "Ġihnen": 24623, - "Ġ59": 24624, - "ounding": 24625, - "еÑĢÑĭ": 24626, - "inars": 24627, - "chant": 24628, - "Ġaddicted": 24629, - "Ġexplosive": 24630, - "Ġdispers": 24631, - "âĸĪ": 24632, - "axis": 24633, - "ARY": 24634, - "Ġlum": 24635, - "ĠÑĥÑģл": 24636, - "ĠØĮ": 24637, - "Ġrupees": 24638, - "ĠPearl": 24639, - "camp": 24640, - "tv": 24641, - "oya": 24642, - "Ġconcludes": 24643, - "Ġcollision": 24644, - "Ġbuyer": 24645, - "Ġplayground": 24646, - "Ġsprings": 24647, - "Ġfeminine": 24648, - "ĠRas": 24649, - "Ġincarcer": 24650, - "íĹĺ": 24651, - "Ġdialect": 24652, - "Ġclosure": 24653, - "Ġchatting": 24654, - "Ġbabe": 24655, - "Ġspotlight": 24656, - "Ġnotation": 24657, - "è·¯": 24658, - "Star": 24659, - "ião": 24660, - "Ġtête": 24661, - "Ġtide": 24662, - "Ġjunto": 24663, - "Ġsenator": 24664, - "Ð¥": 24665, - "Ġexcuses": 24666, - "Ġblink": 24667, - "Ġadmission": 24668, - "ĠLily": 24669, - "Ñĭми": 24670, - "Ġamigo": 24671, - "Ġlust": 24672, - "ëĭ¬": 24673, - "Ġamino": 24674, - "äºĭæĥħ": 24675, - "Ġconsultant": 24676, - "ĠElectric": 24677, - "Ġëħ¸ëŀĺ": 24678, - "ujah": 24679, - "Ġshooter": 24680, - "ichten": 24681, - "ĠUkrainian": 24682, - "Ġaims": 24683, - "ĠEntertain": 24684, - "Ġmiracles": 24685, - "èŃ°": 24686, - "Ġzeigen": 24687, - "Ġlam": 24688, - "Ġress": 24689, - "ĠJill": 
24690, - "ylan": 24691, - "Ġrook": 24692, - "Ġhaya": 24693, - "Ġpassport": 24694, - "adata": 24695, - "Ġjuicy": 24696, - "conf": 24697, - "лей": 24698, - "ĠSz": 24699, - "Ġintercept": 24700, - "ãģĤãĤĬãģĮãģ¨ãģĨãģĶãģĸ": 24701, - "ĠTeams": 24702, - "Ġmaken": 24703, - "irrel": 24704, - "ĠLIKE": 24705, - "áºŃy": 24706, - "êµ°": 24707, - "Ġshortage": 24708, - "Ġparadigm": 24709, - "Ġpapel": 24710, - "Ġastero": 24711, - "ãģ¾ãģŁ": 24712, - "Ġsollen": 24713, - "ĠMickey": 24714, - "ĠOrleans": 24715, - "Ġcholesterol": 24716, - "Ġgoose": 24717, - "ÑĨиÑİ": 24718, - "ãģĤãĤĭ": 24719, - "ĠFL": 24720, - "Ġголов": 24721, - "Ġtribute": 24722, - "ĠGam": 24723, - "Ġévidemment": 24724, - "ÑıÑħ": 24725, - "å®ŀ": 24726, - "çĶ°": 24727, - "Ġinappropri": 24728, - "uhan": 24729, - "Ġorganizational": 24730, - "ailed": 24731, - "Ġendure": 24732, - "Ġ76": 24733, - "Ġshotgun": 24734, - "Ġlivre": 24735, - "Ġsuited": 24736, - "Ġwarmth": 24737, - "ĠSIM": 24738, - "Ġenvision": 24739, - "Ġdegrad": 24740, - "îne": 24741, - "Laughing": 24742, - "ĠWhoever": 24743, - "ĠBuddhism": 24744, - "Ġsprinkle": 24745, - "ceÄŁiz": 24746, - "Ġruins": 24747, - "Ġstarch": 24748, - "ĠHerz": 24749, - "Ġinjustice": 24750, - "Ġhumidity": 24751, - "ожалÑĥй": 24752, - "ĠObject": 24753, - "ĠIgn": 24754, - "ĠExam": 24755, - "igers": 24756, - "Ġthou": 24757, - "ĠSoy": 24758, - "ivas": 24759, - "Ġpoles": 24760, - "math": 24761, - "Ġвним": 24762, - "INGING": 24763, - "edral": 24764, - "Ġexplor": 24765, - "Ġroasted": 24766, - "Ġcrawl": 24767, - "Ġcoff": 24768, - "Ġanom": 24769, - "Ġwij": 24770, - "Ġimproves": 24771, - "Ġtreaty": 24772, - "Ġdiscovering": 24773, - "Ġstatute": 24774, - "Ġmercado": 24775, - "ĠÑģил": 24776, - "Ġintel": 24777, - "ĠChancellor": 24778, - "ĠMedicaid": 24779, - "ugi": 24780, - "Ġverbal": 24781, - "Ġdön": 24782, - "Ġscripture": 24783, - "Ġiteration": 24784, - "eks": 24785, - "ĠOxford": 24786, - "Ġwäh": 24787, - "ĠVad": 24788, - "ĠAK": 24789, - "ĠìķĦìĿ´ë": 24790, - "Ġiets": 24791, - "Ġneedles": 24792, - "ÙĥÙħ": 24793, - "Ġpasado": 24794, - "Ġalbums": 24795, - "Ġyea": 24796, - "etzen": 24797, - "ĦëıĦ": 24798, - "Ġdetermines": 24799, - "Ġthee": 24800, - "ĠPlaying": 24801, - "ärt": 24802, - "Ġצ": 24803, - "cled": 24804, - "Ġdownward": 24805, - "alone": 24806, - "Ġsolu": 24807, - "Ġpartition": 24808, - "Ġwz": 24809, - "dd": 24810, - "Ġpessoal": 24811, - "媽": 24812, - "Ġfactories": 24813, - "Ġbleibt": 24814, - "มา": 24815, - "alsa": 24816, - "ĠNFL": 24817, - "Ġfuera": 24818, - "Ġreserved": 24819, - "ĠEarn": 24820, - "Ġhelt": 24821, - "Ġshortcut": 24822, - "Ġconvincing": 24823, - "space": 24824, - "Ġenforce": 24825, - "Ġcores": 24826, - "Ġefter": 24827, - "Ġrecession": 24828, - "xico": 24829, - "Ġproposition": 24830, - "arians": 24831, - "ropol": 24832, - "Ġ몰ë": 24833, - "ĠÎľ": 24834, - "ĠìļĶì¦ĺ": 24835, - "Ġactivist": 24836, - "Ġconviction": 24837, - "Ġzab": 24838, - "Ġcanceled": 24839, - "ÑĤоÑĩно": 24840, - "Ġή": 24841, - "éĢĻ樣åŃIJ": 24842, - "nite": 24843, - "Ġfundra": 24844, - "buzzer": 24845, - "ело": 24846, - "ications": 24847, - "Ġzona": 24848, - "Ġteens": 24849, - "Ġmethodology": 24850, - "Ġì¤ijìļĶ": 24851, - "than": 24852, - "ĠUl": 24853, - "ĠGrey": 24854, - "Ġhog": 24855, - "INK": 24856, - "ĠSung": 24857, - "ĠClaud": 24858, - "ĠCNN": 24859, - "Ġdelivers": 24860, - "alin": 24861, - "ĠAdobe": 24862, - "othe": 24863, - "ĠDeswegen": 24864, - "ำ": 24865, - "Ġwerde": 24866, - "Ġgrease": 24867, - "Ġupgrades": 24868, - "ĠFinland": 24869, - "accept": 24870, - "Ġinterrog": 24871, - "bee": 24872, - "Ġãģ«": 24873, - "Ġprede": 24874, - 
"ĠNep": 24875, - "ĠCambridge": 24876, - "Ġgraphs": 24877, - "Ġhaunted": 24878, - "Ñģем": 24879, - "æ§": 24880, - "åħĭ": 24881, - "Some": 24882, - "ĠMall": 24883, - "Ġrehearsal": 24884, - "ĠUrban": 24885, - "ĠLag": 24886, - "Ġnim": 24887, - "ê°ķ": 24888, - "Ġpositioned": 24889, - "Ġavoided": 24890, - "EMA": 24891, - "Ġllegar": 24892, - "Ġrápido": 24893, - "Ġgouvern": 24894, - "Ġhing": 24895, - "Ġdealer": 24896, - "Ġreforms": 24897, - "Ġfatty": 24898, - "кол": 24899, - "ĠAce": 24900, - "Ġnep": 24901, - "Ġì²Ń": 24902, - "Ġcomputation": 24903, - "ĠStream": 24904, - "bourne": 24905, - "tur": 24906, - "Por": 24907, - "Ġsleepy": 24908, - "Ġbanget": 24909, - "ãģĤãģ®": 24910, - "Ġweighs": 24911, - "Ġbleiben": 24912, - "ĠGren": 24913, - "Ġunions": 24914, - "ĠêµIJ": 24915, - "Ġaprender": 24916, - "uitar": 24917, - "ĠJest": 24918, - "uming": 24919, - "ĠPlayer": 24920, - "ĠExtrem": 24921, - "Ġinteger": 24922, - "аÑĩе": 24923, - "Ġconcerts": 24924, - "×ķ׼": 24925, - "ĠtrochÄĻ": 24926, - "ĠRepe": 24927, - "éĩįè¦ģ": 24928, - "à¹Ĥ": 24929, - "żen": 24930, - "Ġsounding": 24931, - "Ġanonymous": 24932, - "Ġexca": 24933, - "ĠIranian": 24934, - "Ġenergetic": 24935, - "Ġwives": 24936, - "ĠÑĨвеÑĤ": 24937, - "Ġais": 24938, - "ãģĭãģª": 24939, - "Ġsudah": 24940, - "Ġunderwear": 24941, - "Ġcrunchy": 24942, - "ĠPain": 24943, - "Ġgerçek": 24944, - "redict": 24945, - "Ġmisma": 24946, - "ÑĸÑĤ": 24947, - "Ġsurviving": 24948, - "ÎŃÏĤ": 24949, - "Ġparticipant": 24950, - "ĠHessen": 24951, - "árias": 24952, - "Ġsubway": 24953, - "istä": 24954, - "Ġcoral": 24955, - "Ġmarijuana": 24956, - "ĠMemorial": 24957, - "ÑĪий": 24958, - "riz": 24959, - "Ġsatellites": 24960, - "Ġlease": 24961, - "ĠCameron": 24962, - "umph": 24963, - "Ġclassmates": 24964, - "ähän": 24965, - "ÑģÑĤве": 24966, - "Ġhue": 24967, - "ĵ¤ìĿĦ": 24968, - "Ġproportional": 24969, - "Ġnoss": 24970, - "Ġlaps": 24971, - "rÃ¥": 24972, - "Ġbitcoin": 24973, - "ÐĹЫÐļÐIJ": 24974, - "Ġ충": 24975, - "ĠÙĦÙĦ": 24976, - "ĠMort": 24977, - "ĠEsp": 24978, - "arnos": 24979, - "ĠÑģказал": 24980, - "Ġänd": 24981, - "åħĦ": 24982, - "×Ļ×Ļ×Ŀ": 24983, - "ĠGeb": 24984, - "gehen": 24985, - "Inaudible": 24986, - "borough": 24987, - "ÑĦÑĦ": 24988, - "Ġfellowship": 24989, - "ĠPaper": 24990, - "Ġcurved": 24991, - "ĠGEOR": 24992, - "Ġcalculator": 24993, - "ĠCatal": 24994, - "ĠvÃło": 24995, - "Ġbypass": 24996, - "леÑĤ": 24997, - "à³": 24998, - "trans": 24999, - "rencies": 25000, - "ì¡Į": 25001, - "igent": 25002, - "Ġtasted": 25003, - "Ġoceans": 25004, - "uft": 25005, - "ervice": 25006, - "ĠÐľÐ£ÐĹЫÐļÐIJ": 25007, - "ĠClassic": 25008, - "Ġrespectively": 25009, - "~)": 25010, - "ître": 25011, - "ĠNash": 25012, - "Ġzit": 25013, - "ĠìĽĥ": 25014, - "ĠëĨĴ": 25015, - "quote": 25016, - "ĠUns": 25017, - "Ġtac": 25018, - "Ġproves": 25019, - "ĠPortland": 25020, - "bly": 25021, - "Ġere": 25022, - "ì¶Ķ": 25023, - "Ġépoca": 25024, - "ĠÑĤÑĭÑģÑıÑĩ": 25025, - "76": 25026, - "Ġhade": 25027, - "ĠFro": 25028, - "ĠpolÃŃtica": 25029, - "tag": 25030, - "ĠíķŃ": 25031, - "Ġschö": 25032, - "arett": 25033, - "Ġprovisions": 25034, - "Ġmotors": 25035, - "Ġimaging": 25036, - "Ġdok": 25037, - "ulously": 25038, - "Ġmeille": 25039, - "çİ°åľ¨": 25040, - "ëIJ": 25041, - "ĠISO": 25042, - "ĠSTEM": 25043, - "ĠBowl": 25044, - "Ġtowers": 25045, - "ĠEe": 25046, - "ĠPerformance": 25047, - "Ġloin": 25048, - "cussion": 25049, - "Ġcoastal": 25050, - "iale": 25051, - "compass": 25052, - "Ġspells": 25053, - "Ġdisappointing": 25054, - "Ġë²Ī째": 25055, - "EER": 25056, - "Ġversatile": 25057, - "asury": 25058, - "Ġenfin": 25059, - "Ġdownside": 25060, - 
"Ġguiding": 25061, - "ĠاÙĦÙĤ": 25062, - "Ġninety": 25063, - "charged": 25064, - "ĠFans": 25065, - "Ġphilosophical": 25066, - "Ġgarn": 25067, - "ĠmÃ¥nga": 25068, - "Ġwillingness": 25069, - "Ġportions": 25070, - "aben": 25071, - "Ġï": 25072, - "¿": 25073, - "raul": 25074, - "Ġsprint": 25075, - "ifen": 25076, - "ıyla": 25077, - "ĠкÑĥп": 25078, - "ãģıãģłãģķãģĦ": 25079, - "Ġensuite": 25080, - "ĠCapitol": 25081, - "Ġ63": 25082, - "ĠговоÑĢиÑĤ": 25083, - "Ġappointments": 25084, - "æī¾": 25085, - "omiast": 25086, - "Ġcareg": 25087, - "Ġpublisher": 25088, - "Ġheraus": 25089, - "Ġεί": 25090, - "ĠVS": 25091, - "ãģĿãģĹãģ¦": 25092, - "ä¸Ńåħ±": 25093, - "Ġsacrifices": 25094, - "third": 25095, - "Ġhumanitarian": 25096, - "ĠëĤ´ì": 25097, - "imon": 25098, - "Ġinequ": 25099, - "Ġzob": 25100, - "Ġcomfortably": 25101, - "ĠDinge": 25102, - "Ġcancelled": 25103, - "ĠPSAKI": 25104, - "ĠRobinson": 25105, - "Ġfins": 25106, - ")?": 25107, - "ĠHistor": 25108, - "ĠÑĩеловека": 25109, - "Ġtbsp": 25110, - "text": 25111, - "kim": 25112, - "Ġupdating": 25113, - "Ġgeld": 25114, - "feld": 25115, - "ı¼": 25116, - "Ġmä": 25117, - "Ġcafé": 25118, - "ÖĢ": 25119, - "ĠSri": 25120, - "ĠRegion": 25121, - "ĠHahaha": 25122, - "Ġfinances": 25123, - "ĠاÙĦØ´": 25124, - "Ġbunk": 25125, - "ruk": 25126, - "haft": 25127, - "Ġlateral": 25128, - "Ġextensions": 25129, - "ĠìķĦìĿ´": 25130, - "Ġdefinite": 25131, - "ĠZhao": 25132, - "ĠLuis": 25133, - "sty": 25134, - "Ġcasos": 25135, - "ĠKlim": 25136, - "Ġ1993": 25137, - "Ġrealization": 25138, - "Ġhistorian": 25139, - "Ġcracked": 25140, - "ëĤ´": 25141, - "Ġsystème": 25142, - "ĠCIA": 25143, - "ĠÑĤво": 25144, - "ospheric": 25145, - "Ġflee": 25146, - "Ġrất": 25147, - "ĠRegardless": 25148, - "Ġreluct": 25149, - "Ġtimely": 25150, - "ĠJulian": 25151, - "GM": 25152, - "éĴ": 25153, - "adura": 25154, - "é£Ł": 25155, - "Ġdresses": 25156, - "çģ£": 25157, - "ĠëĶĶ": 25158, - "Ġnominated": 25159, - "Ġadvocates": 25160, - "ymph": 25161, - "Ġrecordings": 25162, - "Ġdeviation": 25163, - "Ġprioritize": 25164, - "Ġspiral": 25165, - "ĠYOUR": 25166, - "Ġtranspose": 25167, - "ampoo": 25168, - "ĠìĽIJëŀĺ": 25169, - "ĠVision": 25170, - "Ġpolite": 25171, - "Ġhamb": 25172, - "ĠPatient": 25173, - "æ¯Ķè¼ĥ": 25174, - "íģ¬ë": 25175, - "Ġsia": 25176, - "Ġê³³": 25177, - "Ġže": 25178, - "è§Ģ": 25179, - "Ġsupermarket": 25180, - "ë¹": 25181, - "ĠSierra": 25182, - "Ġgrilled": 25183, - "ĠUpon": 25184, - "Ġabsent": 25185, - "Ġmec": 25186, - "ĠApollo": 25187, - "Ġpunk": 25188, - "ĠPaÅĦst": 25189, - "ĠÑģвой": 25190, - "Ġ거기": 25191, - "Girl": 25192, - "Ġskinny": 25193, - "ĠPremier": 25194, - "Ġterritories": 25195, - "Ġliability": 25196, - "Ġjerk": 25197, - "ratic": 25198, - "Ġdancers": 25199, - "ĠÑĥÑĢов": 25200, - "Ġê´Ģë": 25201, - "only": 25202, - "ĠStu": 25203, - "Ġskeleton": 25204, - "ĠëŃIJë": 25205, - "Ġзакон": 25206, - "ıkt": 25207, - "ĠMIKE": 25208, - "Ġlö": 25209, - "mie": 25210, - "Ġreiter": 25211, - "ãģĵãĤĮãģ¯": 25212, - "ĠKolleg": 25213, - "ĠAdams": 25214, - "licher": 25215, - "Ġçocuk": 25216, - "Ñıг": 25217, - "Ġblush": 25218, - "Ġsunshine": 25219, - "Ġez": 25220, - "ĠDevil": 25221, - "Ġ길": 25222, - "ĠãģĬ": 25223, - "add": 25224, - "Ġlicensed": 25225, - "Ġvinyl": 25226, - "ĠCzech": 25227, - "imag": 25228, - "Ġcracking": 25229, - "Ġìº": 25230, - "Ġudah": 25231, - "Ġsommes": 25232, - "Ġìĸ¼êµ": 25233, - "waÄĩ": 25234, - "Ġfres": 25235, - "åij½": 25236, - "ĠWalmart": 25237, - "ĠТепеÑĢÑĮ": 25238, - "atisf": 25239, - "CI": 25240, - "lang": 25241, - "Ġdiffusion": 25242, - "çĶ·": 25243, - "Ġsomos": 25244, - "ĠMakes": 25245, - "æĪijæĥ³": 
25246, - "ĠRicky": 25247, - "Ġmucha": 25248, - "íķ¨": 25249, - "Ġhorsepower": 25250, - "asia": 25251, - "Ġfibers": 25252, - "Ġerm": 25253, - "Ñģкие": 25254, - "Ġjeste": 25255, - "Ġfirefight": 25256, - "Ġcuisine": 25257, - "Ġbesonders": 25258, - "dig": 25259, - "Ġì¢ħ": 25260, - "ĠÑĥж": 25261, - "Ġtracing": 25262, - "Ġcertains": 25263, - "ĠApply": 25264, - "ÑĭваÑĤÑĮ": 25265, - "çĮ": 25266, - "Ġbru": 25267, - "ĠYES": 25268, - "ĠBai": 25269, - "ĠDit": 25270, - "ĠBis": 25271, - "Ġunle": 25272, - "ÑģÑĤаÑĤоÑĩно": 25273, - "ĠAwak": 25274, - "..\"": 25275, - "Ġ125": 25276, - "Ġrooted": 25277, - "Ġcautious": 25278, - "const": 25279, - "Ġorchestra": 25280, - "çľ¼": 25281, - "ĠвнÑĥÑĤ": 25282, - "Ġquelqu": 25283, - "ĠоÑĤвеÑĤ": 25284, - "ĠMethod": 25285, - "ì¹ľ": 25286, - "ĠμαÏĤ": 25287, - "lü": 25288, - "ĠìķĦê¹Į": 25289, - "Ġnaming": 25290, - "Char": 25291, - "ĠSicher": 25292, - "Ġprivileged": 25293, - "ĠFly": 25294, - "Ġãģĭ": 25295, - "áºŃt": 25296, - "Ġadvances": 25297, - "ĠZelda": 25298, - "Ġandra": 25299, - "Ġgrinding": 25300, - "ĠEdition": 25301, - "pf": 25302, - "Ġwarriors": 25303, - "Ġhedge": 25304, - "Ġunseren": 25305, - "ĠÑģÑİда": 25306, - "eliness": 25307, - "Ġpersonalities": 25308, - "Ġfö": 25309, - "'M": 25310, - "ĠÑĤоÑĩно": 25311, - "Ġshipped": 25312, - "Ġmeteor": 25313, - "Ġsurroundings": 25314, - "ĠFill": 25315, - "uesta": 25316, - "ĠPersonal": 25317, - "ĠAlle": 25318, - "ORT": 25319, - "ä¹ħ": 25320, - "ĠSche": 25321, - "VI": 25322, - "Ġcomparable": 25323, - "damn": 25324, - "Ġditch": 25325, - "YAN": 25326, - "ismus": 25327, - "Ġpickup": 25328, - "Ġdak": 25329, - "ĠEP": 25330, - "best": 25331, - "ĠSue": 25332, - "ällt": 25333, - "Ġpopcorn": 25334, - "Ġfolding": 25335, - "home": 25336, - "иваеÑĤ": 25337, - "å·²ç¶ĵ": 25338, - "Ġannot": 25339, - "chuck": 25340, - "Ġfierce": 25341, - "Ġdamaging": 25342, - "Ġflop": 25343, - "Ġpasar": 25344, - "Ġreef": 25345, - "ĠÑģвоей": 25346, - "Ġzoo": 25347, - "overs": 25348, - "jets": 25349, - "Ġprès": 25350, - "ĠSilicon": 25351, - "teok": 25352, - "ĠSeth": 25353, - "atamente": 25354, - "Ġtransmitted": 25355, - "Ġreplicate": 25356, - "Ġslim": 25357, - "ĠCream": 25358, - "æĦŁãģĺ": 25359, - "Ġsidewalk": 25360, - "ìĪĺë": 25361, - "ĠжизнÑĮ": 25362, - "ĠMonica": 25363, - "ä¾ĨäºĨ": 25364, - "Ġcopied": 25365, - "ĠTerra": 25366, - "istent": 25367, - "ç³»": 25368, - "Ġоно": 25369, - "Ġwhale": 25370, - "ĠWITH": 25371, - "лÑĥÑĪ": 25372, - "å½±çīĩ": 25373, - "ĠEen": 25374, - "ĠÑģвои": 25375, - "Ġordin": 25376, - "Ġplural": 25377, - "Ġspokes": 25378, - "Ġdispute": 25379, - "Ġsensible": 25380, - "Ġpreaching": 25381, - "Ġktórzy": 25382, - "pted": 25383, - "avier": 25384, - "Ġpistol": 25385, - "ĠTapi": 25386, - "ĠÅĤ": 25387, - "ffff": 25388, - "Ġacrylic": 25389, - "Ġignorance": 25390, - "ĠZiel": 25391, - "rans": 25392, - "Ġwelding": 25393, - "mid": 25394, - "æĪijä¸į": 25395, - "Ġзаним": 25396, - "Ġlanes": 25397, - "Ġmines": 25398, - "Ġmoms": 25399, - "×ķ×Ĺ": 25400, - "ĠChamber": 25401, - "tier": 25402, - "Ġmodest": 25403, - "ĠìĹ¬ê¸°ìĦľ": 25404, - "Ġunas": 25405, - "Ġwrench": 25406, - "handed": 25407, - "Ġsaturated": 25408, - "ĠFang": 25409, - "ĠCommissioner": 25410, - "र": 25411, - "Ġ×ĸ": 25412, - "ĠLouisiana": 25413, - "ĠMask": 25414, - "Ġcubes": 25415, - "ìĶ¨": 25416, - "Ġvidéos": 25417, - "ĠnÃ¥gon": 25418, - "Ġrider": 25419, - "Ġì¶ľ": 25420, - "Ġsón": 25421, - "ĠLatino": 25422, - "bank": 25423, - "íķ´ì£¼": 25424, - "ĠBrend": 25425, - "Ġsexuality": 25426, - "...,": 25427, - "Ġforgetting": 25428, - "ĠÛĮ": 25429, - "ĠAvengers": 25430, - "ĠBonjour": 25431, - "cessor": 
25432, - "кÑĢаÑĹ": 25433, - "cence": 25434, - "Ġgeograph": 25435, - "culo": 25436, - "оÑģÑĤÑĮ": 25437, - "Ġsweating": 25438, - "íĥĢ": 25439, - "Ġsymmetry": 25440, - "tsÃ¥": 25441, - "Ġjan": 25442, - "ĠFerr": 25443, - "é¦ĸ": 25444, - "Ġambassador": 25445, - "ziÄĻk": 25446, - "Ġmusun": 25447, - "ĠÑĥÑĤ": 25448, - "ĠLG": 25449, - "issent": 25450, - "commun": 25451, - "Ġcours": 25452, - "Ġdevelops": 25453, - "Ġbronze": 25454, - "Ġsubstances": 25455, - "driven": 25456, - "주ìĦ¸ìļĶ": 25457, - "Ġaos": 25458, - "åĦĦ": 25459, - "ĠPROFESS": 25460, - "half": 25461, - "Ġsorted": 25462, - "ĠBomb": 25463, - "лаг": 25464, - "ĠMalaysia": 25465, - "ĠChristina": 25466, - "Ġteammate": 25467, - "èģŀ": 25468, - "FT": 25469, - "Ġkı": 25470, - "hearted": 25471, - "++": 25472, - "ogenic": 25473, - "Ġbells": 25474, - "ĠOuais": 25475, - "Ġspecialists": 25476, - "бÑĭ": 25477, - "depth": 25478, - "lasses": 25479, - "gies": 25480, - "ĠCoffee": 25481, - "Ġmarking": 25482, - "Ġfoll": 25483, - "uli": 25484, - "Ġadhesive": 25485, - "ĠBot": 25486, - "ĠPunkt": 25487, - "eye": 25488, - "ĠBub": 25489, - "elong": 25490, - "åĪ¶": 25491, - "ĠпÑĢик": 25492, - "Ġdonor": 25493, - "84": 25494, - "Ġenfor": 25495, - "Ġcatches": 25496, - "Ġbricks": 25497, - "Ġknitting": 25498, - "ĠKnowing": 25499, - "oks": 25500, - "HY": 25501, - "ride": 25502, - "ĠFantasy": 25503, - "iman": 25504, - "Ġpse": 25505, - "Ġìĺ¨": 25506, - "Ġвд": 25507, - "Ġrestra": 25508, - "Ġevaluated": 25509, - "ÑĢев": 25510, - "Ġfortunately": 25511, - "Ġchegar": 25512, - "رب": 25513, - "Ġdomains": 25514, - "ibi": 25515, - "arry": 25516, - "Ġshutter": 25517, - "Ġficou": 25518, - "Mike": 25519, - "Ġinclu": 25520, - "Ġdonors": 25521, - "Ġapl": 25522, - "ĠLower": 25523, - "Ġimported": 25524, - "Ġacademy": 25525, - "Ġfinals": 25526, - "Ġdisappears": 25527, - "ÙĬا": 25528, - "Ġadministrator": 25529, - "js": 25530, - "Ġcutter": 25531, - "Ġranging": 25532, - "örper": 25533, - "Ġconstraint": 25534, - "ĠTable": 25535, - "ĠShan": 25536, - "vic": 25537, - "ĠFix": 25538, - "ĠSwift": 25539, - "ounces": 25540, - "ĠWarum": 25541, - "Ġlettuce": 25542, - "appelle": 25543, - "Ġshave": 25544, - "Ġbás": 25545, - "Ġ77": 25546, - "ĠOoo": 25547, - "ao": 25548, - "ĠMcM": 25549, - "ĠDrew": 25550, - "Ġlump": 25551, - "Ġlashes": 25552, - "scheinlich": 25553, - "Rep": 25554, - "inis": 25555, - "ĠCette": 25556, - "Ġcomposite": 25557, - "emetery": 25558, - "Ġsorte": 25559, - "ĠFinancial": 25560, - "оне": 25561, - "rones": 25562, - "ĠVoy": 25563, - "Ġtéc": 25564, - "ł¹": 25565, - "ĠNinja": 25566, - "ĠCorin": 25567, - "еннÑı": 25568, - "ìĿ´ìĹĪ": 25569, - "Ġnich": 25570, - "Ġdetective": 25571, - "âĢ¦\"": 25572, - "Ïĥε": 25573, - "Ŀ¼ëıĦ": 25574, - "Ġë³Ģ": 25575, - "Ġë¸Ķë": 25576, - "Ġprope": 25577, - "ĠWright": 25578, - "Ġ×Ķת": 25579, - "ĠShi": 25580, - "ĠãģŁ": 25581, - "Ġinvestigations": 25582, - "éĤĦæĺ¯": 25583, - "ĠPowerPoint": 25584, - "ĠChu": 25585, - "Ġìĺ¤í": 25586, - "ĠìĻĦìłĦ": 25587, - "ĠFragen": 25588, - "unning": 25589, - "Ġpourrait": 25590, - "Ġtextbook": 25591, - "мÑĭ": 25592, - "Ġfahren": 25593, - "ĠÑĤоÑĢ": 25594, - "Ġlakes": 25595, - "ünde": 25596, - "Int": 25597, - "ĠMetro": 25598, - "Ġmansion": 25599, - "Ġаб": 25600, - "ĠZhou": 25601, - "Ġcorridor": 25602, - "Ġescol": 25603, - "Ġindicating": 25604, - "iaÅĤa": 25605, - "Ġmommy": 25606, - "Ġarchives": 25607, - "Ġfounders": 25608, - "engine": 25609, - "ĠDieu": 25610, - "Ġsickness": 25611, - "Ġë³´ëĭĪê¹Į": 25612, - "Ġarb": 25613, - "Ġned": 25614, - "ĠChop": 25615, - "Ġcovid": 25616, - "Ġslam": 25617, - "Ġpublications": 25618, - "DC": 25619, - 
"Ġspends": 25620, - "æ¾": 25621, - "Ġrefugee": 25622, - "Ġdile": 25623, - "Ġ×IJ×ĸ": 25624, - "ificar": 25625, - "ĠSach": 25626, - "Gu": 25627, - "Ġreload": 25628, - "????": 25629, - "ĠjeÅĽli": 25630, - "ĠÑģоÑģÑĤо": 25631, - "Ġsimplicity": 25632, - "Ġbullying": 25633, - "Ġмол": 25634, - "Ġrealidad": 25635, - "Ġunclear": 25636, - "appa": 25637, - "levant": 25638, - "ĠISIS": 25639, - "ĠWatson": 25640, - "Ġdein": 25641, - "ĠMicro": 25642, - "íķľë": 25643, - "üg": 25644, - "Ġdevam": 25645, - "Ġtweeted": 25646, - "å°İ": 25647, - "Ġunderstandable": 25648, - "atan": 25649, - "Ġversa": 25650, - "Ġpreca": 25651, - "Ġvá»ģ": 25652, - "ĠCopy": 25653, - "ĠOracle": 25654, - "Ġmindfulness": 25655, - "Ġdiscret": 25656, - "ernen": 25657, - "ĠPle": 25658, - "Have": 25659, - "Ġisolate": 25660, - "Ġdeu": 25661, - "Ġseventy": 25662, - "ĠHills": 25663, - "Ġarcade": 25664, - "ĠÑģпеÑĨи": 25665, - "Ġsiguiente": 25666, - "ĠBÃľNDNIS": 25667, - "liga": 25668, - "ĠвÑģÑĤÑĢеÑĩ": 25669, - "ôm": 25670, - "Ġtweets": 25671, - "Ġschauen": 25672, - "Ġcritique": 25673, - "ĠðŁİµ": 25674, - "Ġstatt": 25675, - "ĠÑģамое": 25676, - "ância": 25677, - "Ġsupernatural": 25678, - "Ġplugged": 25679, - "Fl": 25680, - "ynı": 25681, - "ĠTambién": 25682, - "Ġencouragement": 25683, - "ĠServer": 25684, - "ëĤľ": 25685, - "upa": 25686, - "Ġaston": 25687, - "Ġhears": 25688, - "ÑĢаÑħ": 25689, - "Ġsche": 25690, - "Ġrats": 25691, - "Ġrecuper": 25692, - "Ġunten": 25693, - "ĠFighting": 25694, - "Ġacademics": 25695, - "示": 25696, - "ĠSü": 25697, - "ÑģкиÑħ": 25698, - "Ġpaired": 25699, - "ĢìĿĦ": 25700, - "Ġárea": 25701, - "Ġsweetness": 25702, - "åıĬ": 25703, - "Ġdefer": 25704, - "Ġmuitas": 25705, - "ĠAudio": 25706, - "Ġlocker": 25707, - "ÙĬد": 25708, - "ĠÑģÑĤав": 25709, - "Ġbuena": 25710, - "ANS": 25711, - "Ġdetector": 25712, - "avo": 25713, - "bek": 25714, - "Ġαν": 25715, - "íݸ": 25716, - "Ġdragged": 25717, - "Ġдолжен": 25718, - "Ãĸ": 25719, - "رة": 25720, - "ìĿ´ì§Ģ": 25721, - "Ġcelle": 25722, - "cking": 25723, - "ĠاÙĦج": 25724, - "ĠCanvas": 25725, - "Ġespañ": 25726, - "Ġglimp": 25727, - "Ġspreads": 25728, - "ongo": 25729, - "ĠMason": 25730, - "ĠIng": 25731, - "Ġê°ĢëĬ¥": 25732, - "ÏĦικ": 25733, - "Ġsecular": 25734, - "Ġbater": 25735, - "Ġinquiry": 25736, - "Ġenergies": 25737, - "Ġmanufactured": 25738, - "Ġvegetarian": 25739, - "Ġpineapple": 25740, - "ÑıÑĤа": 25741, - "Ġpractitioners": 25742, - "2000": 25743, - "Ġíķ´ìļĶ": 25744, - "ĠìŬ룬ë¶Ħëĵ¤": 25745, - "Ġë¶Īë": 25746, - "ĠJefferson": 25747, - "ĠJoan": 25748, - "Ġtram": 25749, - "容": 25750, - "chmal": 25751, - "ĠHait": 25752, - "á¹ĩ": 25753, - "Ġunreal": 25754, - "Ġsymbolic": 25755, - "Ġstealth": 25756, - "Ġsplash": 25757, - "ĠEntertainment": 25758, - "Ġmetallic": 25759, - "?\".": 25760, - "è¶Ĭ": 25761, - "around": 25762, - "Ġdespair": 25763, - "ĠNevada": 25764, - "ĠFinance": 25765, - "Ġkrie": 25766, - "ĠLux": 25767, - "ĠSmash": 25768, - "keeping": 25769, - "Ġзаг": 25770, - "Ġnarciss": 25771, - "Ġdzisiaj": 25772, - "Ġtolerate": 25773, - "oard": 25774, - "Ġlinking": 25775, - "ĠEconomic": 25776, - "Ġì¼": 25777, - "Ġmorph": 25778, - "ĠNak": 25779, - "ĠBaker": 25780, - "aton": 25781, - "rings": 25782, - "ĠPeng": 25783, - "ĠAirport": 25784, - "ãģĭãģ£ãģŁ": 25785, - "íķĺëĭ¤": 25786, - "§ģ": 25787, - "prints": 25788, - "Ġhadi": 25789, - "Ġempir": 25790, - "ĠLives": 25791, - "anners": 25792, - "Ġним": 25793, - "ĠPROFESSOR": 25794, - "Ġpositively": 25795, - "antom": 25796, - "Ġbadge": 25797, - "kelt": 25798, - "Ġinterfer": 25799, - "Ġfulfilling": 25800, - "Ġvisualization": 25801, - "éĹľä¿Ĥ": 25802, - "ĠPrice": 
25803, - "��": 25804, - "Ġscenery": 25805, - "Ġprone": 25806, - "Ġwizard": 25807, - "Ġbanyak": 25808, - "verb": 25809, - "sky": 25810, - "Ġwished": 25811, - "Ġrailway": 25812, - "Ġüzer": 25813, - "Ġalguien": 25814, - "ĠAW": 25815, - "ĠколиÑĩе": 25816, - "Ġreacting": 25817, - "ĠBuch": 25818, - "ึ": 25819, - "Ġanth": 25820, - "Ġsih": 25821, - "Ġhust": 25822, - "ĠScreen": 25823, - "ilant": 25824, - "aho": 25825, - "Ġfragrance": 25826, - "Ġelevation": 25827, - "ĠMediter": 25828, - "Ġë¿": 25829, - "Ġéqu": 25830, - "Ġwraps": 25831, - "Ġinert": 25832, - "Ġrecreate": 25833, - "лаÑĤ": 25834, - "Ġboleh": 25835, - "Ġharassment": 25836, - "unky": 25837, - "Ġglimpse": 25838, - "regierung": 25839, - "Ġfutur": 25840, - "Ġrepository": 25841, - "Ġengra": 25842, - "Ġtrafficking": 25843, - "assis": 25844, - "ĠTrek": 25845, - "Ġë²Į": 25846, - "Ġë§Īë": 25847, - "ĠKab": 25848, - "aniu": 25849, - "give": 25850, - "Ġdinosaurs": 25851, - "Ġfeather": 25852, - "Ġattitudes": 25853, - "Ġplum": 25854, - "ĠRS": 25855, - "ĠAnfang": 25856, - "illery": 25857, - "ĠìĬ¤": 25858, - "MY": 25859, - "Ġtrzeba": 25860, - "Ġskies": 25861, - "ĠAj": 25862, - "urable": 25863, - "CU": 25864, - "ĠShane": 25865, - "Ġdeparture": 25866, - "ĠTON": 25867, - "ieten": 25868, - "rats": 25869, - "æ°Ĺ": 25870, - "isu": 25871, - "Ġbord": 25872, - "Ġinterestingly": 25873, - "çĻ»": 25874, - "oughing": 25875, - "Ġrushing": 25876, - "Ġvolatility": 25877, - "Ġpyt": 25878, - "Ġformats": 25879, - "ĠзаÑĤ": 25880, - "Ġê¼Ń": 25881, - "Ġwhatnot": 25882, - "Ġcomport": 25883, - "sw": 25884, - "orean": 25885, - "ĠRelax": 25886, - "Ġclan": 25887, - "ĠAH": 25888, - "Ġpew": 25889, - "Ġdictionary": 25890, - "Take": 25891, - "shirts": 25892, - "ĠHugh": 25893, - "ĠعÙĦÙĬ": 25894, - "ĠPic": 25895, - "Ġenrolled": 25896, - "Ġjednak": 25897, - "Ġofferings": 25898, - "Ġcoraz": 25899, - "Life": 25900, - "Ġ!!!": 25901, - "Ġcler": 25902, - "ĠVideos": 25903, - "ĠRodrig": 25904, - "ĠIdent": 25905, - "ĠPos": 25906, - "ĠStage": 25907, - "ĠRace": 25908, - "Ġenact": 25909, - "ãģĦãģ¾ãģĹãģŁ": 25910, - "ĠGy": 25911, - "ĠHispan": 25912, - "Ġdefence": 25913, - "ĠCampbell": 25914, - "matic": 25915, - "Ġrelev": 25916, - "Ġpeach": 25917, - "Ħ¸ìļĶ": 25918, - "Ġparadise": 25919, - "Ġceremon": 25920, - "Ġannoyed": 25921, - "æĮĩ": 25922, - "lax": 25923, - "Ġexploit": 25924, - "Ġclause": 25925, - "eker": 25926, - "ĠBloom": 25927, - "nant": 25928, - "ateurs": 25929, - "Ġheights": 25930, - "Even": 25931, - "Ñģон": 25932, - "Ġoutrage": 25933, - "ĠVietnamese": 25934, - "ãģ¯ãģ¯": 25935, - "TR": 25936, - "Ġeer": 25937, - "Ġcannon": 25938, - "ĠComb": 25939, - "IJë§Į": 25940, - "è»Ĭ": 25941, - "Ġê²ĥëıĦ": 25942, - "Ġaccomplishments": 25943, - "ĠAnalytics": 25944, - "Ġshaping": 25945, - "reiben": 25946, - "Ġbachelor": 25947, - "Ġfingert": 25948, - "acked": 25949, - "Ġpyramid": 25950, - "ĠStewart": 25951, - "ást": 25952, - "Ġsurvivor": 25953, - "Ġduct": 25954, - "Ġdealers": 25955, - "æ´»": 25956, - "عÙħ": 25957, - "лин": 25958, - "Ġede": 25959, - "×ķ×¢": 25960, - "ĠÙĥاÙĨ": 25961, - "ĠÏĦι": 25962, - "Ġchooses": 25963, - "ĠOwn": 25964, - "гоÑĤов": 25965, - "hire": 25966, - "алÑĮнÑĭе": 25967, - "ĠÐĽÑİ": 25968, - "ĠоÑģÑĤав": 25969, - "tech": 25970, - "Ġdroit": 25971, - "Ġsubjective": 25972, - "enes": 25973, - "Ġdivis": 25974, - "avez": 25975, - "Ġmaneuver": 25976, - "à¹Ħà¸Ķ": 25977, - "adece": 25978, - "ĠEns": 25979, - "acial": 25980, - "ĠProtection": 25981, - "ĸ´": 25982, - "Ġformally": 25983, - "Ġwyd": 25984, - "inguém": 25985, - "Ġziem": 25986, - "Ġrecruiting": 25987, - "×Ļ×ļ": 25988, - "nem": 25989, - 
"Ġforbidden": 25990, - "ĠBapt": 25991, - "×IJ׳×Ļ": 25992, - "Ġsubset": 25993, - "ĠMagaz": 25994, - "nement": 25995, - "Ġaquela": 25996, - "ragon": 25997, - "Ġcommittees": 25998, - "Ġétaient": 25999, - "udi": 26000, - "ĠDawn": 26001, - "Ġbore": 26002, - "Ġcomposer": 26003, - "ĠwiÄĻcej": 26004, - "anga": 26005, - "Ġdislike": 26006, - "ĠDays": 26007, - "åŁº": 26008, - "Ġparal": 26009, - "Ġmientras": 26010, - "Ġheavens": 26011, - "ãģĴ": 26012, - "heid": 26013, - "Ġtraders": 26014, - "once": 26015, - "Ġmascara": 26016, - "ĠÏĢÏģο": 26017, - "Ġwhisper": 26018, - "ĠMusk": 26019, - "éĽĨ": 26020, - "ĠFamilie": 26021, - "Allah": 26022, - "ĠOlivia": 26023, - "ĠPros": 26024, - "Ġolika": 26025, - "ilim": 26026, - "Ġrépond": 26027, - "ĠPeters": 26028, - "Ġå¾Ī": 26029, - "Ġbites": 26030, - "Ġvic": 26031, - "ĠNY": 26032, - "emption": 26033, - "Ġ450": 26034, - "Ġvisuals": 26035, - "Ġlieu": 26036, - "ücken": 26037, - "ĠSteel": 26038, - "ĠGP": 26039, - "wait": 26040, - "Ġnoticeable": 26041, - "ucha": 26042, - "Ġrehabil": 26043, - "Ġrejection": 26044, - "ĠÑģледÑĥÑİÑī": 26045, - "Ġslider": 26046, - "Ġregarded": 26047, - "Ġgravit": 26048, - "ĠReserve": 26049, - "count": 26050, - "Ġbreeding": 26051, - "Ġlonge": 26052, - "aleb": 26053, - "Ġknight": 26054, - "Ġвой": 26055, - "Ġprésent": 26056, - "ĤĺìļĶ": 26057, - "ĠSpecifically": 26058, - "Ġposes": 26059, - "Ġveure": 26060, - "okay": 26061, - "emas": 26062, - "Ġãģ§ãģĻ": 26063, - "ĠmajÄħ": 26064, - "Ġwebinars": 26065, - "Ġcannabis": 26066, - "Ġdamals": 26067, - "ĠNorthwest": 26068, - "Ġpada": 26069, - "Ġcrowds": 26070, - "Ġfutures": 26071, - "Ġän": 26072, - "Ġcivilians": 26073, - "ĠSachen": 26074, - "æį": 26075, - "Ġtraces": 26076, - "Ġë¨¹ê³ł": 26077, - "QU": 26078, - "é¡ĺãģĦ": 26079, - "ĠIF": 26080, - "anın": 26081, - "ìĤ´": 26082, - "Ġbiblical": 26083, - "ĠVed": 26084, - "Ġstoring": 26085, - "ÑĢавлÑı": 26086, - "æĩī該": 26087, - "Ġnast": 26088, - "Ġdö": 26089, - "ÑĢоп": 26090, - "elia": 26091, - "Ġsideways": 26092, - "ĠUnderstand": 26093, - "ĠQur": 26094, - "Ġperpend": 26095, - "ĠMillionen": 26096, - "Ġwatermelon": 26097, - "ĠDivine": 26098, - "ultur": 26099, - "abord": 26100, - "Ġsuccesses": 26101, - "Ġhombre": 26102, - "Ġcarp": 26103, - "Ġsuscept": 26104, - "ungkin": 26105, - "Ġkij": 26106, - "ulus": 26107, - "اج": 26108, - "Ġnotch": 26109, - "Ġpolynomial": 26110, - "å¹²": 26111, - "å©": 26112, - "Ġúnico": 26113, - "Ġtelescope": 26114, - "Ġpolitique": 26115, - "kiem": 26116, - "ĠÎŃνα": 26117, - "Ġaggregate": 26118, - "ĠGeoff": 26119, - "Ġtril": 26120, - "ĠGRA": 26121, - "Ġsubscriber": 26122, - "imet": 26123, - "ĠдоллаÑĢ": 26124, - "oping": 26125, - "Ġtherapeut": 26126, - "ĠCancer": 26127, - "Ġparade": 26128, - "Ġirrig": 26129, - "âĻªâĻª": 26130, - "Ġclearer": 26131, - "Ġbog": 26132, - "ĠMaur": 26133, - "าà¸ĩ": 26134, - "ĠShanghai": 26135, - "achte": 26136, - "ĠKol": 26137, - "elujah": 26138, - "Ġhav": 26139, - "ĠCrime": 26140, - "sek": 26141, - "Ġë¡ľ": 26142, - "ienna": 26143, - "ĠGor": 26144, - "èĽ": 26145, - "ĠпоÑĤÑĢ": 26146, - "ĠкажеÑĤÑģÑı": 26147, - "ĠLift": 26148, - "ĠSort": 26149, - "ĠPsal": 26150, - "Ġping": 26151, - "ĵĿ": 26152, - "phis": 26153, - "ĠFUCK": 26154, - "ĠSyn": 26155, - "Ġbamboo": 26156, - "¬ìĺģ": 26157, - "cuts": 26158, - "Ġmmm": 26159, - "Ġfunktioniert": 26160, - "Ġ_": 26161, - "ÃŃcio": 26162, - "Stop": 26163, - "Ġimaginary": 26164, - "Ġnotamment": 26165, - "ĠInitiative": 26166, - "ãĥ¥": 26167, - "ĠKurt": 26168, - "Ġloosen": 26169, - "Ġbuscar": 26170, - "çģ«": 26171, - "Ġzelf": 26172, - "Ġprops": 26173, - "åĽī": 26174, - "Ġmoeten": 26175, - 
"Ġmilli": 26176, - "Ġhalls": 26177, - "ĠMatch": 26178, - "Ġbrackets": 26179, - "ĠCou": 26180, - "æ¦Ĥ": 26181, - "ĠÐľÐ°ÑĢ": 26182, - "ISA": 26183, - "Ġcigarette": 26184, - "Ġcompetitions": 26185, - "ĠMIN": 26186, - "Ġbehö": 26187, - "voor": 26188, - "Ġust": 26189, - "ĠZi": 26190, - "ĠOcc": 26191, - "ulates": 26192, - "Ġballoons": 26193, - "Ġpronto": 26194, - "ĠMiy": 26195, - "ĠFile": 26196, - "ĠклаÑģÑģ": 26197, - "нÑĥл": 26198, - "Ġcereal": 26199, - "Ġincrement": 26200, - "Ġrefined": 26201, - "åı¦å¤ĸ": 26202, - "prising": 26203, - "ĠRF": 26204, - "Ġrespectful": 26205, - "Ġloot": 26206, - "asket": 26207, - "Ġdeixa": 26208, - "ingle": 26209, - "Ġfunciona": 26210, - "ĠRevel": 26211, - "Ġsober": 26212, - "Ġperforms": 26213, - "ĠGentle": 26214, - "ãĤ¨": 26215, - "Ġrecipient": 26216, - "ĠHause": 26217, - "Ġëĥ": 26218, - "From": 26219, - "Ġministers": 26220, - "Ġparadox": 26221, - "å°±æĺ¯èªª": 26222, - "Ġtasting": 26223, - "Ġ×Ķ×Ĺ": 26224, - "Ġreuse": 26225, - "ĠLane": 26226, - "ĠÑģовеÑĢÑĪ": 26227, - "Ġremembers": 26228, - "Ġfeminist": 26229, - "Ġcommitments": 26230, - "Ġprojected": 26231, - "Ġgaz": 26232, - "iyoruz": 26233, - "Ġobligations": 26234, - "Ro": 26235, - "zar": 26236, - "Ġchw": 26237, - "ĠJAM": 26238, - "ĠbÄĻdÄħ": 26239, - "aspberry": 26240, - "ĠмеÑģÑĤо": 26241, - "ë²ķ": 26242, - "Ġregulated": 26243, - "Ġwicht": 26244, - "ĠTrevor": 26245, - "Ġsecondly": 26246, - "ĠIhre": 26247, - "elsh": 26248, - "Ġreporters": 26249, - "ÑĤоÑĢа": 26250, - "oyo": 26251, - "GI": 26252, - "Ġinterconnect": 26253, - "éIJĺ": 26254, - "OSH": 26255, - "æŃ²": 26256, - "Ġbrass": 26257, - "Ġignoring": 26258, - "ä»ĬæĹ¥": 26259, - "infect": 26260, - "Ġprojekt": 26261, - "oret": 26262, - "ÏĦαν": 26263, - "ĠÑĤип": 26264, - "Ġmutta": 26265, - "Ġunboxing": 26266, - "Ħ°": 26267, - "å¡Ĭ": 26268, - "Ġadvised": 26269, - "ĠDenver": 26270, - "Ġseverely": 26271, - "ĠMhm": 26272, - "Ġflipped": 26273, - "Ġpien": 26274, - "Ġkommun": 26275, - "ĠFRE": 26276, - "Ġà®ĩà®°": 26277, - "ainted": 26278, - "Ġknives": 26279, - "Ġhabl": 26280, - "Ġgeworden": 26281, - "arettes": 26282, - "CS": 26283, - "ĠмаленÑĮ": 26284, - "Ġgalax": 26285, - "Ġninete": 26286, - "ê±°ëĤĺ": 26287, - "Ġsis": 26288, - "Ġadvisory": 26289, - "Ġdrilling": 26290, - "ĠWouldn": 26291, - "ünf": 26292, - "gestellt": 26293, - "ĠHelen": 26294, - "Ġ×ŀ×IJ": 26295, - "apolis": 26296, - "Ġrzeczy": 26297, - "Ġterra": 26298, - "Ġhep": 26299, - "Ġalgún": 26300, - "ikk": 26301, - "Ġastronom": 26302, - "ĠStarbucks": 26303, - "kÄħ": 26304, - "Ġpatrol": 26305, - "Ġì½Ķ": 26306, - "Ġgon": 26307, - "ĠãĢIJ": 26308, - "Ġsonst": 26309, - "Ġencounters": 26310, - "Ġretrou": 26311, - "Ġsharks": 26312, - "Ġdor": 26313, - "ĠRever": 26314, - "Ġevapor": 26315, - "Ġreservoir": 26316, - "Ġalleged": 26317, - "uler": 26318, - "Ġverm": 26319, - "Ġcommerce": 26320, - "Ġfitted": 26321, - "gem": 26322, - "Ġtactical": 26323, - "Ġlith": 26324, - "éīĦå¡Ķ": 26325, - "had": 26326, - "è®Ĭ": 26327, - "Ġcarbohyd": 26328, - "Ġlengths": 26329, - "ιο": 26330, - "Ġdemographic": 26331, - "Rob": 26332, - "ĠSkin": 26333, - "ccoli": 26334, - "Ġsimplified": 26335, - "Ġreadily": 26336, - "ĠCum": 26337, - "adesh": 26338, - "ĠDÃ¥": 26339, - "usst": 26340, - "igne": 26341, - "eton": 26342, - "Ġmenor": 26343, - "qi": 26344, - "OOM": 26345, - "à¸Ńà¸Ļ": 26346, - "Ġpsychiat": 26347, - "Ġeighty": 26348, - "Ġмилли": 26349, - "ĠTob": 26350, - "edo": 26351, - "網": 26352, - "ĠÄijến": 26353, - "Ġcircuits": 26354, - "ĠLAUGH": 26355, - "icism": 26356, - "emor": 26357, - "Ġregener": 26358, - "egree": 26359, - "Ġbureauc": 26360, - "ĠAlber": 
26361, - "ä¹ĭå¾Į": 26362, - "ĠWor": 26363, - "夫": 26364, - "Ġresin": 26365, - "ĠbyÅĤy": 26366, - "ĠIG": 26367, - "à¯į,": 26368, - "Ġ78": 26369, - "Ġweeds": 26370, - "ĠMyth": 26371, - "93": 26372, - "æ¿": 26373, - "ĠëĤĺìĻĶ": 26374, - "év": 26375, - "á½": 26376, - "ören": 26377, - "çar": 26378, - "ĠPAUL": 26379, - "Ġdisadvant": 26380, - "Ġpositioning": 26381, - "Ġcocktail": 26382, - "Ġagrees": 26383, - "nn": 26384, - "ĠSally": 26385, - "Ms": 26386, - "Ġinherent": 26387, - "Ġmonetary": 26388, - "Ġnatur": 26389, - "ĠNh": 26390, - "ĠImport": 26391, - "Ġleben": 26392, - "Ġwi": 26393, - "ussy": 26394, - "Ġobes": 26395, - "Ġwandering": 26396, - "Ġìĭłë": 26397, - "Äħda": 26398, - "etchup": 26399, - "Ġdisposal": 26400, - "ĠJA": 26401, - "ĠCer": 26402, - "zilla": 26403, - "Ġvirgin": 26404, - "ĠSlide": 26405, - "andel": 26406, - "Ġrighteousness": 26407, - "ĠΣ": 26408, - "Ġideia": 26409, - "ä½łå¥½": 26410, - "иÑĢоваÑĤÑĮ": 26411, - "ר×IJ": 26412, - "Comment": 26413, - "Ġprelim": 26414, - "ĠVale": 26415, - "Ġì§ĢëĤľ": 26416, - "ĠVanc": 26417, - "OMAN": 26418, - "ĠпÑĸд": 26419, - "Ġyum": 26420, - "stre": 26421, - "cem": 26422, - "Ġpocz": 26423, - "Ġfragment": 26424, - "ĠÑģлÑĥÑĩае": 26425, - "Ġundergo": 26426, - "ĠHank": 26427, - "ceks": 26428, - "ĠFPS": 26429, - "Ġocur": 26430, - "Ġdeterior": 26431, - "注": 26432, - "Ġempresas": 26433, - "Paul": 26434, - "Ġ)))": 26435, - "ĠвÑĢемени": 26436, - "Ġscold": 26437, - "×Ļ×¢": 26438, - "Ġsuspected": 26439, - "Ġaccessing": 26440, - "Ġsubstit": 26441, - "Ġhistorians": 26442, - "ä»»": 26443, - "Ġдело": 26444, - "Ġsocied": 26445, - "rone": 26446, - "Ġreden": 26447, - "Ġextends": 26448, - "epherd": 26449, - "Ġbalcon": 26450, - "ä¸įèµ·": 26451, - "ĠSolo": 26452, - "Ġpolitician": 26453, - "олÑĮно": 26454, - "Ġirgendw": 26455, - "Ġtraumatic": 26456, - "Ġrapper": 26457, - "ĠROBERT": 26458, - "Really": 26459, - "æģ¯": 26460, - "Ġlineup": 26461, - "ASE": 26462, - "Ġcontractor": 26463, - "ĠCorporation": 26464, - "gor": 26465, - "ĠTodo": 26466, - "ÑģÑĤÑĢой": 26467, - "FBE": 26468, - "Ġnewsletter": 26469, - "ĠkoÅĦ": 26470, - "alties": 26471, - "ĠпÑĢиÑĩ": 26472, - "ĠHeavy": 26473, - "Ġswords": 26474, - "Ġmanipulation": 26475, - "Ġfunk": 26476, - "ĠvÃ¥r": 26477, - "ĠTaliban": 26478, - "Ġë°¥": 26479, - "Ġacne": 26480, - "ürü": 26481, - "Ġdeswegen": 26482, - "ĠDust": 26483, - "Ġsilic": 26484, - "Ġhooks": 26485, - "Ġblij": 26486, - "Ġpetits": 26487, - "Ġfilme": 26488, - "ĠBereich": 26489, - "ĠSaid": 26490, - "Ġimposed": 26491, - "Ġdiary": 26492, - "ĠгоÑĢ": 26493, - "ĠGates": 26494, - "Ġalta": 26495, - "å¸Į": 26496, - "Ġchcia": 26497, - "pleasant": 26498, - "Ġë°Ŀ": 26499, - "Ġmożemy": 26500, - "ĠAustria": 26501, - "Ġbroker": 26502, - "Ġsucked": 26503, - "èĢĥ": 26504, - "Ġcompartment": 26505, - "Ġclone": 26506, - "Ġ×Ķ×¢": 26507, - "ĠDanke": 26508, - "Ġnochmal": 26509, - "езд": 26510, - "Ġadrenal": 26511, - "Ġkleinen": 26512, - "ãģ¾ãģĹãĤĩãģĨ": 26513, - "Ġsubsequently": 26514, - "Ġdecentral": 26515, - "Ġgenetics": 26516, - "Ġê´ij": 26517, - "Ġmonitors": 26518, - "ĠApplic": 26519, - "ĠReporter": 26520, - "wert": 26521, - "Ġwiem": 26522, - "ĠMovement": 26523, - "Ġinterviewing": 26524, - "Ġhairs": 26525, - "Ġpuò": 26526, - "ĠChelsea": 26527, - "Ġcoher": 26528, - "Ġcot": 26529, - "Ġzas": 26530, - "Ġpatches": 26531, - "Ġlah": 26532, - "Ñĥнк": 26533, - "ĠReagan": 26534, - "ĠMarco": 26535, - "city": 26536, - "Ġdefender": 26537, - "Ġdecoration": 26538, - "iji": 26539, - "Ġlitter": 26540, - "Ш": 26541, - "Ġjego": 26542, - "REW": 26543, - "ĠPik": 26544, - "ĠHee": 26545, - "ĠIv": 26546, - "Ġиде": 
26547, - "ĠTheater": 26548, - "ĠÑĩаÑģÑĤо": 26549, - "Ġsweater": 26550, - "Ġhighlighting": 26551, - "Ġainsi": 26552, - "Ġdiplomatic": 26553, - "ĠNevertheless": 26554, - "å³": 26555, - "ASON": 26556, - "Ġpúblico": 26557, - "Ġferm": 26558, - "reated": 26559, - "cod": 26560, - "Ġ물ë": 26561, - "Ġmister": 26562, - "ĠVancouver": 26563, - "Ġrecognizes": 26564, - "ecd": 26565, - "Ġcomplications": 26566, - "encial": 26567, - "ãģĹãģı": 26568, - "Ġê°Ģì§Ģ": 26569, - "ĠUltimate": 26570, - "Ġvaig": 26571, - "ĠMerry": 26572, - "×ķ×Ĵ": 26573, - "ĠMarcus": 26574, - "總": 26575, - "owego": 26576, - "Ġmente": 26577, - "Sm": 26578, - "Ġaja": 26579, - "ĠTao": 26580, - "Ġjudicial": 26581, - "Ġentrepreneurship": 26582, - "Ġнемного": 26583, - "Ġpis": 26584, - "Ġerg": 26585, - "Ġchrist": 26586, - "ĠCurt": 26587, - "ĠÑĢаÑģп": 26588, - "λε": 26589, - "ensch": 26590, - "ÃŃre": 26591, - "Ġfocal": 26592, - "ĠDiamond": 26593, - "avÃŃa": 26594, - "Ġhanno": 26595, - "ĠSquad": 26596, - "Ġassociations": 26597, - "ĠCreative": 26598, - "Ġmessenger": 26599, - "Ġbegging": 26600, - "Ġdecimal": 26601, - "ĠdÄ±ÅŁ": 26602, - "Ġmetadata": 26603, - "sels": 26604, - "ĠÄ°ÅŁ": 26605, - "ữa": 26606, - "Ġdifficile": 26607, - "dı": 26608, - "Ġslaughter": 26609, - "ĠVerg": 26610, - "Ġ×Ĵ×Ŀ": 26611, - "ç°¡": 26612, - "æĮī": 26613, - "ĠTea": 26614, - "asses": 26615, - "Ok": 26616, - "Ġsynthes": 26617, - "otiation": 26618, - "Ġpainter": 26619, - "Ġelbows": 26620, - "Ġarchitectural": 26621, - "ĠÑĢад": 26622, - "Ġglor": 26623, - "image": 26624, - "ampa": 26625, - "culiar": 26626, - "ł¨": 26627, - "Ġteve": 26628, - "ĠStelle": 26629, - "ĠBam": 26630, - "Ġì´Ī": 26631, - "asis": 26632, - "ipedia": 26633, - "ĠGI": 26634, - "ĠActive": 26635, - "çĦ¶åIJİ": 26636, - "azi": 26637, - "ãĤĮãģ¦": 26638, - "ĠLucky": 26639, - "íķ©": 26640, - "ĠпÑĢиÑħод": 26641, - "Ġrunway": 26642, - "Ġauthentication": 26643, - "Ġposible": 26644, - "Ġsupplements": 26645, - "Ġsurgical": 26646, - "Gen": 26647, - "Ġfeasible": 26648, - "DO": 26649, - "Ġoutlook": 26650, - "Ġintervals": 26651, - "Ġanecd": 26652, - "Ãłng": 26653, - "Ġstraps": 26654, - "ĠShu": 26655, - "udd": 26656, - "issenschaft": 26657, - "Ġporte": 26658, - "Ġcommitting": 26659, - "Ġalley": 26660, - "Ġcovenant": 26661, - "ĠPedro": 26662, - "lessness": 26663, - "ĠSolid": 26664, - "ĠMolly": 26665, - "ĠнекоÑĤоÑĢ": 26666, - "Ġcooperate": 26667, - "åĮĹ": 26668, - "ollen": 26669, - "Ġtuna": 26670, - "Ġkindergarten": 26671, - "ĠSiz": 26672, - "Ġdużo": 26673, - "ĠMBA": 26674, - "ĠGEORGE": 26675, - "ĠFisher": 26676, - "å¿ĺ": 26677, - "ĠCaesar": 26678, - "ĠкÑĢаÑģив": 26679, - "ĠDelhi": 26680, - "zym": 26681, - "Ġexplicar": 26682, - "ê°Ģì§Ģ": 26683, - "uns": 26684, - "grow": 26685, - "ĠпÑĢиÑģ": 26686, - "Ġ86": 26687, - "Ġstating": 26688, - "Ġmassa": 26689, - "chter": 26690, - "Ġì»¬ëŁ¬": 26691, - "Ġdeputy": 26692, - "SM": 26693, - "noc": 26694, - "Ġgeography": 26695, - "ĠEnterprise": 26696, - "ĠCant": 26697, - "öz": 26698, - "Ġunpack": 26699, - "ĠíĻĶë": 26700, - "Ġsearches": 26701, - "Ġpresidency": 26702, - "Ġtrivial": 26703, - "Ġpige": 26704, - "oubt": 26705, - "ãĤļ": 26706, - "ì¼ĢìĿ´": 26707, - "Ġbudgets": 26708, - "Ġub": 26709, - "Ġpne": 26710, - "ĠYale": 26711, - "ĠÅŁÃ¶yle": 26712, - "regular": 26713, - "Ġimperfect": 26714, - "ARA": 26715, - "ĠfamÃŃlia": 26716, - "urm": 26717, - "ĠAdventure": 26718, - "ãĥĬ": 26719, - "cis": 26720, - "emark": 26721, - "Ġnego": 26722, - "Ġinappropriate": 26723, - "ĠпÑĢиз": 26724, - "ĠÑĢол": 26725, - "Ġdreamed": 26726, - "Bry": 26727, - "Ġshuttle": 26728, - "Ġpillars": 26729, - "Ġbik": 26730, - 
"inum": 26731, - "ĠÑĥÑģ": 26732, - "ĠNebr": 26733, - "Ġperpendicular": 26734, - "Ġbooked": 26735, - "bery": 26736, - "Ġvikt": 26737, - "bear": 26738, - "esus": 26739, - "Ġвозможно": 26740, - "¨¹": 26741, - "Ġpresumably": 26742, - "ĠMemphis": 26743, - "Ġambulance": 26744, - "×ķ×ŀר": 26745, - "Ġthumbnail": 26746, - "Ġmodification": 26747, - "éĩı": 26748, - "Ġinterpreted": 26749, - "Ġpromo": 26750, - "Ġκά": 26751, - "ĠεÏĢ": 26752, - "Ġacoustic": 26753, - "ĠDB": 26754, - "åĵİ": 26755, - "Ġnonetheless": 26756, - "oule": 26757, - "Ġpequ": 26758, - "Ġknob": 26759, - "ãĤ£": 26760, - "ĠëıĮìķĦ": 26761, - "Ġpurchases": 26762, - "ĠÃĩünkü": 26763, - "Ġdividing": 26764, - "perform": 26765, - "raction": 26766, - "healthy": 26767, - "ĠTitle": 26768, - "Ġuk": 26769, - "Ġcerca": 26770, - "Ġarguably": 26771, - "Ġfale": 26772, - "ë³µ": 26773, - "Ġgamers": 26774, - "Ġutilizing": 26775, - "Ġoffended": 26776, - "Ġtava": 26777, - "alı": 26778, - "Ġmedian": 26779, - "Ġinfectious": 26780, - "ĠAnnie": 26781, - "Ġsmartphones": 26782, - "Ġparole": 26783, - "åĸĿ": 26784, - "ĠEpic": 26785, - "zza": 26786, - "Ġunified": 26787, - "Ġê·¸ëķĮ": 26788, - "Ġcurtain": 26789, - "ĠÄĥ": 26790, - "Ġsexually": 26791, - "Ġunserem": 26792, - "ĠConvention": 26793, - "Ġallegedly": 26794, - "Ya": 26795, - "ĠHoo": 26796, - "enment": 26797, - "æĢª": 26798, - "íĽĦ": 26799, - "Ġgigantic": 26800, - "Ġnoting": 26801, - "Ġrebo": 26802, - "ĠJama": 26803, - "ĠAlz": 26804, - "Ġborrowed": 26805, - "침": 26806, - "Ġperipher": 26807, - "оÑĤа": 26808, - "ĠGB": 26809, - "ĠGear": 26810, - "Ġeconomically": 26811, - "Ġtelefon": 26812, - "Ġqueremos": 26813, - "ĠдалÑĮÑĪе": 26814, - "Ġras": 26815, - "ĠTeach": 26816, - "icios": 26817, - "atos": 26818, - "Ġpledge": 26819, - "bau": 26820, - "ĠHimself": 26821, - "Link": 26822, - "Ġespero": 26823, - "Ġchromos": 26824, - "ĠPER": 26825, - "Ġerle": 26826, - "Ġpodium": 26827, - "ços": 26828, - "Ġnieu": 26829, - "Ġfen": 26830, - "ĠGOD": 26831, - "ĠChocolate": 26832, - "werk": 26833, - "Ġtừ": 26834, - "Ġsuppress": 26835, - "λη": 26836, - "Ġ240": 26837, - "Ġsitä": 26838, - "Ġhonesty": 26839, - "ĠBio": 26840, - "ĠBard": 26841, - "ĠобÑīем": 26842, - "ĠмÑĥз": 26843, - "Ġmarble": 26844, - "ĠÑĨенÑĤ": 26845, - "Ġprocure": 26846, - "Ġrotor": 26847, - "bern": 26848, - "Ġtuh": 26849, - "Ġheadset": 26850, - "atem": 26851, - "Ġwarranty": 26852, - "à®´": 26853, - "Ġfiling": 26854, - "ιά": 26855, - "Ġcomprendre": 26856, - "Ġimpulse": 26857, - "Ġsalv": 26858, - "written": 26859, - "Ġinstitute": 26860, - "Kim": 26861, - "ĠLGBTQ": 26862, - "ficiente": 26863, - "His": 26864, - "ĠαÏħÏĦÏĮ": 26865, - "Ġteenage": 26866, - "orus": 26867, - "ĠÑĢазб": 26868, - "See": 26869, - "ĠConserv": 26870, - "á»ģn": 26871, - "fulness": 26872, - "Ġstrawberries": 26873, - "ĠAbu": 26874, - "ион": 26875, - "Ġolla": 26876, - "NOISE": 26877, - "ĠEmploy": 26878, - "Ġwiped": 26879, - "urger": 26880, - "Ġmodifications": 26881, - "Ġíķĺì§Ģ": 26882, - "Ġfootsteps": 26883, - "Ġhonors": 26884, - "Ġadul": 26885, - "Ġflipping": 26886, - "ĠHU": 26887, - "ZY": 26888, - "Ġintegrating": 26889, - "بر": 26890, - "ulla": 26891, - "Ġnatuurlijk": 26892, - "ĠíĹĪ": 26893, - "ĠEthereum": 26894, - "ÙĬÙĦ": 26895, - "wed": 26896, - "Ġpeaks": 26897, - "ĠKes": 26898, - "Ġbloom": 26899, - "Ġcrashing": 26900, - "Ġ911": 26901, - "ĠоÑĤлиÑĩ": 26902, - "Ġcontrollers": 26903, - "ĠDod": 26904, - "ĠвмеÑģÑĤе": 26905, - "Ġsortir": 26906, - "å¥ĩ": 26907, - "ĠStraight": 26908, - "ĠGracias": 26909, - "Ġgroove": 26910, - "Ġtogg": 26911, - "Ġìĭ¶ìĿĢ": 26912, - "éro": 26913, - "Ġoutward": 26914, - "ĠWA": 
26915, - "ĠRocky": 26916, - "Ġscam": 26917, - "Ġhayat": 26918, - "ignty": 26919, - "âĦ": 26920, - "plings": 26921, - "Ġantibiotics": 26922, - "Ġä¸Ģ": 26923, - "Ġnevertheless": 26924, - "jang": 26925, - "commerce": 26926, - "Ġspoiler": 26927, - "Ġglove": 26928, - "Ġchatter": 26929, - "ĠBY": 26930, - "~?": 26931, - "Ġíĺ¸": 26932, - "Ġdemol": 26933, - "wechsel": 26934, - "imir": 26935, - "Ġraid": 26936, - "еÑĢÑħ": 26937, - "ìŀIJ기": 26938, - "enf": 26939, - "Ġcommented": 26940, - "Ġoptimized": 26941, - "Ġconvicted": 26942, - "Ġbats": 26943, - "ĠSB": 26944, - "ĠAur": 26945, - "ĠTong": 26946, - "Ġimplicit": 26947, - "ĠJanet": 26948, - "Ġreag": 26949, - "ãģ²": 26950, - "ĠAdvanced": 26951, - "Ġimpose": 26952, - "ש×Ķ": 26953, - "Ġschemes": 26954, - "ougher": 26955, - "abolic": 26956, - "Ġê±°ì£ł": 26957, - "Ġslowing": 26958, - "Ġwtedy": 26959, - "Ġdestructive": 26960, - "ĠопÑĢед": 26961, - "Ġlandmark": 26962, - "ĠëıĪ": 26963, - "ĠWalking": 26964, - "ẹ": 26965, - "Ġtijd": 26966, - "ĠKN": 26967, - "ĠQuant": 26968, - "ìĺ¤ë": 26969, - "ĠкÑĢÑĥ": 26970, - "Ġperder": 26971, - "Ġnove": 26972, - "ände": 26973, - "ĠãģĹ": 26974, - "bia": 26975, - "Ġcustody": 26976, - "Ġbiod": 26977, - "æĿ±è¥¿": 26978, - "Ġdirecting": 26979, - "...âĢĭ": 26980, - "Ġreloc": 26981, - "Ġdemande": 26982, - "ãĤĵãģł": 26983, - "ĠoÄŁlum": 26984, - "Ġодна": 26985, - "ĠMilk": 26986, - "åı·": 26987, - "ĠKra": 26988, - "ĠHonda": 26989, - "Ġpue": 26990, - "Ġelekt": 26991, - "Ġbeginners": 26992, - "Ġspear": 26993, - "ÃŃnh": 26994, - "ĠLuft": 26995, - "Ġnig": 26996, - "ĠSchools": 26997, - "Ġforums": 26998, - "ĠQin": 26999, - "ppo": 27000, - "Ġzag": 27001, - "ĠЮ": 27002, - "Ġtoothp": 27003, - "ĠStyle": 27004, - "ì´Ī": 27005, - "Ġpunct": 27006, - "Ġreps": 27007, - "ĠAly": 27008, - "Ġamendments": 27009, - "Ġöz": 27010, - "Ġdigits": 27011, - "urai": 27012, - "Ġchaotic": 27013, - "ĠMasters": 27014, - "eon": 27015, - "ĠCash": 27016, - "ĠCuz": 27017, - "Ġbedeutet": 27018, - "Ġscanning": 27019, - "Ġжд": 27020, - "неÑĤ": 27021, - "Ġcertainty": 27022, - "jek": 27023, - "Ġdijo": 27024, - "ĠClimate": 27025, - "Ġrinse": 27026, - "Ġkrij": 27027, - "veland": 27028, - "Ġsoundtrack": 27029, - "ĠSafe": 27030, - "ĠNova": 27031, - "94": 27032, - "Ġathe": 27033, - "ĠVerb": 27034, - "oler": 27035, - "ìĿ´ì£ł": 27036, - "Ġvin": 27037, - "Ġrespiratory": 27038, - "ĠStudy": 27039, - "ĠCAM": 27040, - "Ġavocado": 27041, - "ĠZhen": 27042, - "Ġlatency": 27043, - "Ġfeathers": 27044, - "Ġcontar": 27045, - "ĠвеÑī": 27046, - "Ġfark": 27047, - "Ġblended": 27048, - "Ġexploded": 27049, - "ĠXX": 27050, - "ĠBenim": 27051, - "Ġalguém": 27052, - "istoire": 27053, - "Ġconfidential": 27054, - "Ġmast": 27055, - "Ġì¿": 27056, - "geh": 27057, - "Ġdisrespect": 27058, - "ĠSystems": 27059, - "Æ°a": 27060, - "Ed": 27061, - "Ġwys": 27062, - "Ġexotic": 27063, - "Ġglowing": 27064, - "ùng": 27065, - "ounge": 27066, - "èĦ": 27067, - "аниз": 27068, - "Ġpalav": 27069, - "ĠSword": 27070, - "Ġgim": 27071, - "ĠCrow": 27072, - "Ġpotent": 27073, - "bish": 27074, - "Ġabused": 27075, - "ĠJed": 27076, - "Ġgambling": 27077, - "ĠSpect": 27078, - "Ġinvestigators": 27079, - "æĻļ": 27080, - "Ġratt": 27081, - "Ġdob": 27082, - "ĠDES": 27083, - "hog": 27084, - "ĠоÑĤкÑĢÑĭ": 27085, - "íĮħ": 27086, - "ĠденÑĮги": 27087, - "Ġíĺ¹": 27088, - "Ġ머리": 27089, - "Ġsaturation": 27090, - "Ġinherited": 27091, - "ĠInnovation": 27092, - "ìĹĪëįĺ": 27093, - "Ġtangible": 27094, - "Ġdepri": 27095, - "hed": 27096, - "Ġпомог": 27097, - "Ġsliced": 27098, - "à¥į": 27099, - "Ġthế": 27100, - "Å¥": 27101, - "68": 27102, - "Ġcorona": 27103, - 
"Ġgifted": 27104, - "Ġsoir": 27105, - "Ġhumility": 27106, - "ĠìĿ´ê±¸": 27107, - "Ġflaws": 27108, - "ĠпÑĢакÑĤи": 27109, - "Ġkald": 27110, - "waż": 27111, - "yw": 27112, - "ãĤĵãģ§ãģĻ": 27113, - "irteen": 27114, - "Ġcrochets": 27115, - "¦¬ê°Ģ": 27116, - "ĠìłĦìĹIJ": 27117, - "Ġdese": 27118, - "æ¥Ń": 27119, - "Ġмаг": 27120, - "ĠdziaÅĤ": 27121, - "Ġlég": 27122, - "changing": 27123, - "Ġllev": 27124, - "ÅĦsk": 27125, - "çĶ»": 27126, - "Ġ1984": 27127, - "orns": 27128, - "ĠWelsh": 27129, - "Ġpharmaceutical": 27130, - "Ġpumping": 27131, - "ĠShaw": 27132, - "punk": 27133, - "Ġvault": 27134, - "Ġkinetic": 27135, - "Ġhurricane": 27136, - "ĠIncluding": 27137, - "ức": 27138, - "ĠGrandpa": 27139, - "anship": 27140, - "é¦Ļ港": 27141, - "ĠвÑĭÑħод": 27142, - "нож": 27143, - "ľł": 27144, - "utta": 27145, - "Ġê²ģëĭĪëĭ¤": 27146, - "Ġbaz": 27147, - "ĠпоÑĪ": 27148, - "Ġpeculiar": 27149, - "zyÄĩ": 27150, - "ĠEllie": 27151, - "Ġlearns": 27152, - "ĠKrishna": 27153, - "Ġconsecut": 27154, - "Ġempath": 27155, - "ĠDin": 27156, - "Ġtraded": 27157, - "ĠBoris": 27158, - "uggage": 27159, - "olla": 27160, - "Ġназв": 27161, - "Ġeternity": 27162, - "Ġвп": 27163, - "èmes": 27164, - "Ġgrapp": 27165, - "bé": 27166, - "ĠпÑĢедÑģÑĤав": 27167, - "ĠFC": 27168, - "įëĭĪëĭ¤": 27169, - "even": 27170, - "ĠNebraska": 27171, - "ortune": 27172, - "Ġkarena": 27173, - "ĠAgent": 27174, - "Ġsting": 27175, - "ĠPI": 27176, - "Ġmunicipal": 27177, - "powered": 27178, - "Ġconsegue": 27179, - "ĠManchester": 27180, - "Ġrainy": 27181, - "Ġbli": 27182, - "Ġkost": 27183, - "Ġhalten": 27184, - "ĠAhhh": 27185, - "insula": 27186, - "erting": 27187, - "ĠاÙĦÙģ": 27188, - "Ġrelacion": 27189, - "Ġkomen": 27190, - "Ġdome": 27191, - "Ġpriests": 27192, - "ĠIntrodu": 27193, - "rophe": 27194, - "shore": 27195, - "velt": 27196, - "clipse": 27197, - "ĠÑĢÑĥÑģ": 27198, - "×Ļס": 27199, - "Ġsabemos": 27200, - "ĠHolland": 27201, - "ogi": 27202, - "anki": 27203, - "ĠMats": 27204, - "Ġsmoked": 27205, - "ullie": 27206, - "Ġeurope": 27207, - "ĠдейÑģÑĤвиÑĤелÑĮно": 27208, - "Ġbardziej": 27209, - "Ġtransforming": 27210, - "ĠEz": 27211, - "opath": 27212, - "Ġìĸ¸ëĭĪ": 27213, - "ĠÑģÑĤан": 27214, - "ằng": 27215, - "ัà¹ī": 27216, - "ĠOuch": 27217, - "Ġclearance": 27218, - "ustain": 27219, - "Ġsolidarity": 27220, - "Ġproving": 27221, - "ĠÐĺн": 27222, - "ĠÑģÑĬ": 27223, - "Ġprolong": 27224, - "адно": 27225, - "Ġsos": 27226, - "ĠDeal": 27227, - "Ġ170": 27228, - "mons": 27229, - "Ġзем": 27230, - "Ġlogged": 27231, - "Ġlifelong": 27232, - "Ġsensory": 27233, - "Ġbehold": 27234, - "ĠFAR": 27235, - "ètement": 27236, - "ĠFederation": 27237, - "Ġdodge": 27238, - "ĠShir": 27239, - "Ġdragons": 27240, - "ĠArctic": 27241, - "Äħż": 27242, - "Åį": 27243, - "º": 27244, - "Ġdenke": 27245, - "ĠpodrÃŃa": 27246, - "cole": 27247, - "ÑĥлÑĮÑĤаÑĤ": 27248, - "Ġsystematic": 27249, - "ама": 27250, - "chos": 27251, - "Ġclinics": 27252, - "ĠBS": 27253, - "Ġtales": 27254, - "usions": 27255, - "ĠíĪ¬": 27256, - "Ġpreservation": 27257, - "Ġlore": 27258, - "ĠProtest": 27259, - "Ỽ": 27260, - "å¸Ĥ": 27261, - "Ġacknowledged": 27262, - "ĠIsaiah": 27263, - "ĠëķĮëĬĶ": 27264, - "Ġ×ĺ": 27265, - "Ġcompetitor": 27266, - "Ġadvancing": 27267, - "zip": 27268, - "Ġtenth": 27269, - "ĠLaure": 27270, - "Ġhints": 27271, - "Ġexercising": 27272, - "ŀľë": 27273, - "ĠIntelligence": 27274, - "uated": 27275, - "OUT": 27276, - "oped": 27277, - "Ġautonomy": 27278, - "Ġbranding": 27279, - "ĠMediterranean": 27280, - "Ñĸк": 27281, - "Ġscrewdriver": 27282, - "Ġsupre": 27283, - "Ġstap": 27284, - "Ġjurisdiction": 27285, - "ĠSettings": 27286, - 
"Ġforefront": 27287, - "ĠFemale": 27288, - "comfort": 27289, - "Ġmultiplication": 27290, - "ĠMurray": 27291, - "Ġbob": 27292, - "ĠTas": 27293, - "Ġtahu": 27294, - "Ġonun": 27295, - "etter": 27296, - "Ġprophets": 27297, - "lag": 27298, - "Ġrevenues": 27299, - "Ġprá": 27300, - "Ġuploading": 27301, - "Ġmachinery": 27302, - "ascal": 27303, - "ĠEstá": 27304, - "ĠGoth": 27305, - "ĠBald": 27306, - "ĠSaw": 27307, - "Ġstripes": 27308, - "ìłij": 27309, - "Ġpowin": 27310, - "æĹ¥æľ¬": 27311, - "Ġhostile": 27312, - "Ġdarum": 27313, - "Ġprevented": 27314, - "ожалÑĥйÑģÑĤа": 27315, - "Ġalgunas": 27316, - "Ġhopeless": 27317, - "Ġznaj": 27318, - "Ġreadings": 27319, - "Ġcraving": 27320, - "tat": 27321, - "ĠPig": 27322, - "Ġliar": 27323, - "çĪ±": 27324, - "Ġmultiplayer": 27325, - "Ġdale": 27326, - "ĠCourse": 27327, - "íģ¼": 27328, - "ĠKita": 27329, - "Ġcustoms": 27330, - "Ġresponds": 27331, - "endra": 27332, - "è¦ĸ": 27333, - "Ġmetro": 27334, - "Ñģол": 27335, - "Ġmitigate": 27336, - "Ġoppression": 27337, - "ĠæĪijåĢij": 27338, - "quinho": 27339, - "Ġammo": 27340, - "Ġenfer": 27341, - "Ġpony": 27342, - "Ġounces": 27343, - "°Ķ": 27344, - "ĠìĪĺê°Ģ": 27345, - "Ġdicho": 27346, - "ĠDeb": 27347, - "Ġwonders": 27348, - "ĠRoose": 27349, - "Ġprizes": 27350, - "ĠALEX": 27351, - "Ġthankfully": 27352, - "Ġtissues": 27353, - "ĠÑĢавно": 27354, - "ĠLuna": 27355, - "intelligible": 27356, - "ĠìĻ¸": 27357, - "ê°ij": 27358, - "ĠHeat": 27359, - "ĠÑģид": 27360, - "ĠQui": 27361, - "Ġions": 27362, - "Ġaccommodation": 27363, - "便": 27364, - "ĠKart": 27365, - "ienst": 27366, - "Ġtarde": 27367, - "Ġsoaked": 27368, - "ĠCasey": 27369, - "Ġì´Ŀ": 27370, - "ĠÑĢÑĥб": 27371, - "Ġdifferenti": 27372, - "Ġleftover": 27373, - "Ġexchanges": 27374, - "second": 27375, - "Ġfirstly": 27376, - "Ġbuilder": 27377, - "rien": 27378, - "Ġdw": 27379, - "Ġbouncing": 27380, - "?<": 29986, - "ologÃŃa": 29987, - "wealth": 29988, - "Ġmeditate": 29989, - "ĵ¤ìĿĺ": 29990, - "ĠCraft": 29991, - "è§īå¾Ĺ": 29992, - "æĻ®": 29993, - "riv": 29994, - "ĠAgainst": 29995, - "Ġceramic": 29996, - "espère": 29997, - "Ġcompetent": 29998, - "ĠHopkins": 29999, - "Ġkilos": 30000, - "Ġgravel": 30001, - "Ġpiston": 30002, - "Ġfriendships": 30003, - "Ġescre": 30004, - "Ġvoz": 30005, - "ĠGesellschaft": 30006, - "Ġunterstüt": 30007, - "Ġmuj": 30008, - "Ġwarnings": 30009, - "pos": 30010, - "ĠProfessional": 30011, - "wszy": 30012, - "odle": 30013, - "bands": 30014, - "Ġteamwork": 30015, - "stellung": 30016, - "Ġdx": 30017, - "åįĬ": 30018, - "Ġattorneys": 30019, - "Ġweitere": 30020, - "ãħĭãħĭãħĭ": 30021, - "ĠOriginal": 30022, - "×Ļ×Ĺ": 30023, - "Ġbroadcasting": 30024, - "ĠпеÑĢвÑĭй": 30025, - "uchi": 30026, - "Ġheure": 30027, - "Ġgrabs": 30028, - "ĠWOR": 30029, - "ĠPlaid": 30030, - "Min": 30031, - "Ġpaz": 30032, - "ĠPuis": 30033, - "umu": 30034, - "itates": 30035, - "Ġcoats": 30036, - "Ġbuen": 30037, - "Ġheir": 30038, - "Ġpneum": 30039, - "שר": 30040, - "enser": 30041, - "ĠJUDGE": 30042, - "Ġblonde": 30043, - "á¹Ľ": 30044, - "Ġgak": 30045, - "Ġsık": 30046, - "Ġquoted": 30047, - "Ġequipo": 30048, - "Ġwishing": 30049, - "ÃŃcia": 30050, - "Ġverbs": 30051, - "çµĦ": 30052, - "ĠCanadians": 30053, - "Ġgoverning": 30054, - "ĠEvans": 30055, - "Euro": 30056, - "Ġgenres": 30057, - "Ġunterschied": 30058, - "ĠBecky": 30059, - "³¼ê²ĮìļĶ": 30060, - "Ġeinge": 30061, - "ĠRaise": 30062, - "oland": 30063, - "ĠStrateg": 30064, - "Ġeres": 30065, - "ĠVeterans": 30066, - "Ġbreakout": 30067, - "Ġsanté": 30068, - "Ġadel": 30069, - "Ġinvestigated": 30070, - "Ġpeur": 30071, - "Ġagile": 30072, - "Ġrailroad": 30073, - 
"anska": 30074, - "Ġей": 30075, - "Ġexpos": 30076, - "atories": 30077, - "ĠContent": 30078, - "Ġtruths": 30079, - "ĠTrail": 30080, - "Ġgua": 30081, - "Ġpores": 30082, - "Ġwritings": 30083, - "ĠUhr": 30084, - "ĠThats": 30085, - "Ġicing": 30086, - "OC": 30087, - "ĠProduction": 30088, - "Ġcarne": 30089, - "ISS": 30090, - "Ġninguém": 30091, - "non": 30092, - "Ġvicious": 30093, - "×ķ×Ķ": 30094, - "Ġreconnect": 30095, - "Ġcentres": 30096, - "ĠKem": 30097, - "Ġcrease": 30098, - "ĠìĿ´ë¯¸": 30099, - "айÑĤеÑģÑĮ": 30100, - "ĠбоÑĢ": 30101, - "ĠHayır": 30102, - "ĠÑģÑĥд": 30103, - "Ġúnica": 30104, - "owaÅĤ": 30105, - "Ġadher": 30106, - "hua": 30107, - "ZZ": 30108, - "Ġpreciso": 30109, - "Ġcurrents": 30110, - "Ġseasoned": 30111, - "ĠIoT": 30112, - "ĠBishop": 30113, - "è¨Ī": 30114, - "sted": 30115, - "ĠBernard": 30116, - "ì¤ĺ": 30117, - "æ²»": 30118, - "ĠGlenn": 30119, - "Ġktórym": 30120, - "ืà¹Ī": 30121, - "Ġastrolog": 30122, - "ĠKot": 30123, - "å¤ľ": 30124, - "Ġparfois": 30125, - "Ġforwards": 30126, - "ĠWiÄĻ": 30127, - "ĠÎĺ": 30128, - "Ġnano": 30129, - "è»į": 30130, - "sub": 30131, - "ĠBrill": 30132, - "Ġgrit": 30133, - "Ġcited": 30134, - "gado": 30135, - "Ġmelts": 30136, - "Ġforcé": 30137, - "âĸĪâĸĪ": 30138, - "Ġbajo": 30139, - "Ġdiscretion": 30140, - "°°": 30141, - "ativity": 30142, - "Ġsituated": 30143, - "ãĥ«ãĤ¯": 30144, - "Ñīее": 30145, - "åľ°æĸ¹": 30146, - "ĠпÑĢинÑĨип": 30147, - "amaz": 30148, - "Ġaquarium": 30149, - "Ġdissolve": 30150, - "ĠGods": 30151, - "Super": 30152, - "Ġamid": 30153, - "zk": 30154, - "ĠãģĦ": 30155, - "éłIJ": 30156, - "ampf": 30157, - "Ġhela": 30158, - "'!": 30159, - "Ġdevelopmental": 30160, - "ĠDise": 30161, - "ĠÑĢабоÑĤаеÑĤ": 30162, - "Ġsnapshot": 30163, - "好好": 30164, - "Õ¸": 30165, - "ĠYue": 30166, - "ĠHulk": 30167, - "ĠDoom": 30168, - "ĠFelix": 30169, - "Ġréf": 30170, - "Male": 30171, - "ç·Ĭ": 30172, - "phants": 30173, - "ENS": 30174, - "ĠMechan": 30175, - "ĠGolf": 30176, - "åĨįè¦ĭ": 30177, - "Ġgenerosity": 30178, - "ätze": 30179, - "Ġunlocked": 30180, - "ĠãĤĴ": 30181, - "íĥģ": 30182, - "ocalypse": 30183, - "Alright": 30184, - "Ġê°ľë": 30185, - "Ġ×IJ×ij׾": 30186, - "ĠKeeping": 30187, - "Ġcollaborating": 30188, - "chief": 30189, - "ĠFernando": 30190, - "Ġchefs": 30191, - "ĠíĶ¼ë¶Ģ": 30192, - "Ġskipped": 30193, - "Ġpersonn": 30194, - "Ġaxe": 30195, - "chez": 30196, - "Ġextraction": 30197, - "ĠAV": 30198, - "ĠGibbs": 30199, - "Ġíľ": 30200, - "Ġsı": 30201, - "IAM": 30202, - "View": 30203, - "ĠGRANT": 30204, - "Ġ몸": 30205, - "Ġverification": 30206, - "Ġdepicted": 30207, - "ĠMoz": 30208, - "oux": 30209, - "Ġtul": 30210, - "Ġscanner": 30211, - "Ġcomedian": 30212, - "ĠVolks": 30213, - "ĠJEFF": 30214, - "è¨Ĥéĸ±": 30215, - "§Ħ": 30216, - "Ġdistraction": 30217, - "rá": 30218, - "ĠINTER": 30219, - "Ġsincer": 30220, - "Ġ×ŀת": 30221, - "Ġש׳": 30222, - "Ġconstructive": 30223, - "arf": 30224, - "ĠëĪĦë": 30225, - "Ġeco": 30226, - "ramos": 30227, - "Ġrenewed": 30228, - "inement": 30229, - "ĠUb": 30230, - "ĠPepper": 30231, - "ì§Ģê°Ģ": 30232, - "ĠDarwin": 30233, - "Ġmerchand": 30234, - "Ġvárias": 30235, - "èce": 30236, - "NG": 30237, - "ĠìľĦíķ´ìĦľ": 30238, - "ĠакÑĤив": 30239, - "ĠUnters": 30240, - "عÙĦ": 30241, - "Ġintric": 30242, - "omma": 30243, - "ieving": 30244, - "ĠCaroline": 30245, - "åĵģ": 30246, - "ĠPRES": 30247, - "Ġperformer": 30248, - "Ġautour": 30249, - "ãģ¾ãģĽãĤĵ": 30250, - "Ġutterly": 30251, - "Ġsynthesis": 30252, - "Ġlesbian": 30253, - "Ġretrieve": 30254, - "Ġmaneira": 30255, - "Ġimpair": 30256, - "Ġmentoring": 30257, - "ĠSouls": 30258, - "ĠGoPro": 30259, - "ÑĢаÑĤÑĮ": 30260, - 
"Ġcose": 30261, - "ĠSSD": 30262, - "IRE": 30263, - "Ġupfront": 30264, - "ĠAun": 30265, - "Ġgamer": 30266, - "Ġlitt": 30267, - "Ġaggression": 30268, - "ĠLikewise": 30269, - "ĠBetty": 30270, - "ĠDart": 30271, - "ĠDLC": 30272, - "ishment": 30273, - "ìŀ¥ìĿĦ": 30274, - "Ġ对": 30275, - "ç»ı": 30276, - "cream": 30277, - "ĠBabylon": 30278, - "Ġnug": 30279, - "brar": 30280, - "Ġaynı": 30281, - "amily": 30282, - "bike": 30283, - "ahahaha": 30284, - "loyd": 30285, - "Ġmira": 30286, - "Ġperme": 30287, - "ĠGaming": 30288, - "Ġfirmware": 30289, - "Ma": 30290, - "Ġassisted": 30291, - "atics": 30292, - "Ġìķŀìľ¼ë¡ľ": 30293, - "ĠMental": 30294, - "niejs": 30295, - "ĠIz": 30296, - "owÄħ": 30297, - "Ġtougher": 30298, - "Ġdeed": 30299, - "èĭ¦": 30300, - "Ġstylish": 30301, - "ĠTools": 30302, - "ĠHamp": 30303, - "Ġsunscreen": 30304, - "Ġarticulate": 30305, - "iye": 30306, - "иÑĦ": 30307, - "ĠSpread": 30308, - "ĠHAVE": 30309, - "Ġswirl": 30310, - "Ġsponsoring": 30311, - "ä»ĭ": 30312, - "iovascular": 30313, - "mesi": 30314, - "Ġrelaxation": 30315, - "ĠÑģвоиÑħ": 30316, - "Ġmargins": 30317, - "ĠsaÄŁ": 30318, - "ĠPride": 30319, - "ĠÏĦοÏħÏĤ": 30320, - "иÑĨи": 30321, - "enci": 30322, - "Does": 30323, - "Ġcorpse": 30324, - "Ġendurance": 30325, - "Ġíŀĺ": 30326, - "ì¹´": 30327, - "Ġhaircut": 30328, - "Ġinterrupted": 30329, - "Ġwindy": 30330, - "ĠCaleb": 30331, - "ÏģÏĩ": 30332, - "ĠPourquoi": 30333, - "Ġholistic": 30334, - "uclear": 30335, - "ĠWhole": 30336, - "士": 30337, - "Act": 30338, - "Ġgallon": 30339, - "cade": 30340, - "ĠRegional": 30341, - "roads": 30342, - "ĠSchne": 30343, - "áng": 30344, - "Ġизмен": 30345, - "ãĤĪãģŃ": 30346, - "Ġmenus": 30347, - "Ġsplitting": 30348, - "Ġpriced": 30349, - "ĠÎĵ": 30350, - "Ġusername": 30351, - "ĠÐŀÑĩ": 30352, - "Ġcompressed": 30353, - "yin": 30354, - "Ġguardian": 30355, - "Ġgoof": 30356, - "Ġchecklist": 30357, - "Ġinterchange": 30358, - "Ġexpedition": 30359, - "Ġextern": 30360, - "Ġinfrared": 30361, - "engo": 30362, - "Ġdenying": 30363, - "Ġpackets": 30364, - "onent": 30365, - "BB": 30366, - "ĠIncre": 30367, - "Ġsini": 30368, - "ÃŁer": 30369, - "èg": 30370, - "maal": 30371, - "generation": 30372, - "Ġminorities": 30373, - "Ġllevar": 30374, - "Ġnomination": 30375, - "Ġconsid": 30376, - "Ġ×ľ×¢": 30377, - "muÅŁ": 30378, - "ĠEsc": 30379, - "Ġnumerator": 30380, - "Ġkaik": 30381, - "Ġktórych": 30382, - "iesen": 30383, - "Ġvê": 30384, - "ĠUSS": 30385, - "ĠPrivate": 30386, - "Ġодно": 30387, - "Ġalém": 30388, - "ÃŃtulo": 30389, - "Ġlimb": 30390, - "Ġforgiven": 30391, - "Ġdisclosure": 30392, - "ÏĦί": 30393, - "Ġningún": 30394, - "Ġtherapeutic": 30395, - "Ġnegotiating": 30396, - "ĠNike": 30397, - "enseful": 30398, - "Ġincap": 30399, - "Ġflagship": 30400, - "town": 30401, - "âĪ": 30402, - "ĠÏĢολ": 30403, - "Ġwolves": 30404, - "Ġviolations": 30405, - "ĠArnold": 30406, - "Ġintervene": 30407, - "Ġheater": 30408, - "Ġrecursos": 30409, - "Ġmaid": 30410, - "ê²¼": 30411, - "ĠдавайÑĤе": 30412, - "ĠCelebr": 30413, - "Ġcape": 30414, - "ĠSty": 30415, - "ainen": 30416, - "site": 30417, - "bij": 30418, - "ĠполÑĮз": 30419, - "Ġframed": 30420, - "Ġpublishers": 30421, - "ĠÑĩÑĥÑĤÑĮ": 30422, - "Ġtemptation": 30423, - "Ġcerteza": 30424, - "Ġexempt": 30425, - "ìĬ¹": 30426, - "selling": 30427, - "ĠTask": 30428, - "hoon": 30429, - "ĠCoc": 30430, - "ĠParks": 30431, - "Ġrepetition": 30432, - "ĠÑĤÑĥда": 30433, - "Ġensl": 30434, - "ĠdeÄŁiÅŁ": 30435, - "ĠOrlando": 30436, - "ĠMainten": 30437, - "æŃ¢": 30438, - "ocument": 30439, - "ĠHC": 30440, - "Ġscooter": 30441, - "ĠнапиÑģ": 30442, - "Ġtighter": 30443, - "Ġtease": 
30444, - "Ġremoves": 30445, - "Ġkijken": 30446, - "ĠÑģÑĥÑīеÑģÑĤв": 30447, - "Ġthé": 30448, - "ĠвÑĭглÑıд": 30449, - "Ġrelieve": 30450, - "Ġmitä": 30451, - "Ġstationary": 30452, - "öff": 30453, - "pable": 30454, - "Ġarter": 30455, - "Ġdéf": 30456, - "rative": 30457, - "Ġconect": 30458, - "Ġsaddle": 30459, - "ĠDiane": 30460, - "Ġcommemor": 30461, - "fendim": 30462, - "SÃŃ": 30463, - "Ġíģ´ë": 30464, - "Ġmange": 30465, - "atte": 30466, - "Ġarrogant": 30467, - "Ġrobotic": 30468, - "ĠgiÃł": 30469, - "æĺ¯çļĦ": 30470, - "Ġneighbourhood": 30471, - "isson": 30472, - "Ġдвиж": 30473, - "ĠRI": 30474, - "ĠNorman": 30475, - "brand": 30476, - "amation": 30477, - "Ġrazor": 30478, - "Ġmurders": 30479, - "ĠÑĤÑĥ": 30480, - "Ġwszystkim": 30481, - "Ġutilities": 30482, - "Ġmicroscop": 30483, - "ê¿": 30484, - "Ġdaqui": 30485, - "ollar": 30486, - "ĠÐĶавайÑĤе": 30487, - "Ġannée": 30488, - "Ġkilometres": 30489, - "Ġhomosexual": 30490, - "Ġarchitects": 30491, - "ãģ¡ãģ¯": 30492, - "Ġniye": 30493, - "LER": 30494, - "Ġmicrophones": 30495, - "ĠStunden": 30496, - "Ġconsecutive": 30497, - "ienda": 30498, - "vänd": 30499, - "DER": 30500, - "Ġlifts": 30501, - "ĠMeat": 30502, - "Ġsavez": 30503, - "íĸĪëįĺ": 30504, - "Men": 30505, - "Ġdismant": 30506, - "거를": 30507, - "Ġinsulation": 30508, - "Ġscall": 30509, - "Ġspooky": 30510, - "Ġparc": 30511, - "Ġballet": 30512, - "ĠWhatsApp": 30513, - "Ġfranc": 30514, - "Ġdeliberate": 30515, - "ĠíħĮ": 30516, - "Ġmars": 30517, - "ĠZur": 30518, - "Pr": 30519, - "disciplinary": 30520, - "Ġobsession": 30521, - "ме": 30522, - "Ġmarching": 30523, - "ĠEmergency": 30524, - "iguous": 30525, - "Ġszy": 30526, - "ĠLands": 30527, - "Ġboarding": 30528, - "ĠпоÑĩÑĤи": 30529, - "Ġenvy": 30530, - "Ġcompassionate": 30531, - "Ġmerci": 30532, - "Ġdesirable": 30533, - "dale": 30534, - "Ġcanım": 30535, - "ĠAntar": 30536, - "temps": 30537, - "Ġconfigured": 30538, - "ĠCompared": 30539, - "neh": 30540, - "icating": 30541, - "Ġnickel": 30542, - "ÙĪÙĤ": 30543, - "ÙĥÙĪÙĨ": 30544, - "opes": 30545, - "Ġformulas": 30546, - "ĠÐķÑģÑĤÑĮ": 30547, - "Ġpobl": 30548, - "ĠPJ": 30549, - "ĠLud": 30550, - "ä»ĬåĽŀ": 30551, - "ĠBrid": 30552, - "ĠHog": 30553, - "ĠBris": 30554, - "Jen": 30555, - "Ġshading": 30556, - "ĠYas": 30557, - "Ġdisturbed": 30558, - "Ġrecommending": 30559, - "Ġcé": 30560, - "ĠHOW": 30561, - "ìĹĪìĸ´": 30562, - "Ġreversed": 30563, - "ĠInterestingly": 30564, - "ioxid": 30565, - "åħŃ": 30566, - "Ġìĺ¤ì¼ĢìĿ´": 30567, - "ếu": 30568, - "xx": 30569, - "Ġouais": 30570, - "ĠYouTubers": 30571, - "ĠRosa": 30572, - "ĠHaupt": 30573, - "jadi": 30574, - "Ġvlogs": 30575, - "Ġcultura": 30576, - "ĠLeadership": 30577, - "ĠHep": 30578, - "Ġillum": 30579, - "´ëıĻ": 30580, - "Ġcustomized": 30581, - "Ġmarca": 30582, - "Ġquatro": 30583, - "Ġнаг": 30584, - "ĠSpaceX": 30585, - "ĠEigen": 30586, - "asting": 30587, - "ĠolduÄŁu": 30588, - "Ġforts": 30589, - "ãģī": 30590, - "riment": 30591, - "iencia": 30592, - "Ġtenir": 30593, - "roffen": 30594, - "Ġ1979": 30595, - "Ġcie": 30596, - "ĠëIJĺê³ł": 30597, - "Ġescri": 30598, - "ÏĮÏĤ": 30599, - "íı¬": 30600, - "uzzy": 30601, - "Cong": 30602, - "ìĿ¸ìĿ´": 30603, - "Great": 30604, - "sil": 30605, - "éch": 30606, - "ãģ¨ãģĭ": 30607, - "Ġmultic": 30608, - "ĠDisk": 30609, - "²ķ": 30610, - "Ġfazla": 30611, - "Ġlevant": 30612, - "Ġabajo": 30613, - "urry": 30614, - "stru": 30615, - "Ġ먹ëĬĶ": 30616, - "Ġaccessory": 30617, - "Ġдвиг": 30618, - "ĠRid": 30619, - "2019": 30620, - "Ġdownstream": 30621, - "æķ¸": 30622, - "Ġkaz": 30623, - "utan": 30624, - "Ġcharcoal": 30625, - "Ġafect": 30626, - "wu": 30627, - "Ġcontexts": 
30628, - "Ġfeared": 30629, - "ĠìĦ¤": 30630, - "Ġhistories": 30631, - "Ġfas": 30632, - "ensible": 30633, - "Ġcocoa": 30634, - "illar": 30635, - "geons": 30636, - "Ġspirituality": 30637, - "ĠPew": 30638, - "Ġpharmacy": 30639, - "Ġpassions": 30640, - "Ġbos": 30641, - "Ġallá": 30642, - "Ġthriving": 30643, - "ĠReact": 30644, - "Ġoccupy": 30645, - "Ġwithdrawal": 30646, - "Ġallowance": 30647, - "ĠFraktion": 30648, - "Ġbuddies": 30649, - "Ġidle": 30650, - "Ġdissolved": 30651, - "Ġprevalent": 30652, - "Ġmilitar": 30653, - "Ġsensing": 30654, - "Ġpojaw": 30655, - "Ġancora": 30656, - "Ġabundant": 30657, - "Ġhairst": 30658, - "ãģĤãĤĮ": 30659, - "Ġtwee": 30660, - "Ġnächste": 30661, - "ĠMöglichkeit": 30662, - "Ġhoo": 30663, - "ufficient": 30664, - "Ġfantast": 30665, - "Ġedible": 30666, - "Ġëĸ¨ìĸ´ì": 30667, - "ìĽĥ": 30668, - "Ġvein": 30669, - "ucci": 30670, - "Ġdevotion": 30671, - "Ġconcealer": 30672, - "income": 30673, - "Ġrecycled": 30674, - "ĠìĬ¤íĥĢ": 30675, - "Ġpontos": 30676, - "Ġdessus": 30677, - "Ġvérit": 30678, - "Ġreflections": 30679, - "ĠAA": 30680, - "Ġtakeaway": 30681, - "bare": 30682, - "ĠContact": 30683, - "eil": 30684, - "ĠHear": 30685, - "Ġmirac": 30686, - "ĠGerilim": 30687, - "ĠÑģамÑĭй": 30688, - "Ġvivo": 30689, - "Ġkilograms": 30690, - "ĠCrim": 30691, - "ût": 30692, - "78": 30693, - "Ġsincerely": 30694, - "raz": 30695, - "Ġë³µ": 30696, - "Ġarriv": 30697, - "Ġconception": 30698, - "ĠPersian": 30699, - "Ġsjäl": 30700, - "Ġstarring": 30701, - "ĠìķĦ무": 30702, - "ĠForever": 30703, - "еÑģÑĤÑĮ": 30704, - "Ġveil": 30705, - "Ġsubtit": 30706, - "odka": 30707, - "ĠоÑĤноÑĪ": 30708, - "Ġcooks": 30709, - "енÑı": 30710, - "Kay": 30711, - "Ġniños": 30712, - "ĠPhone": 30713, - "Ġstitching": 30714, - "Ġfingerprint": 30715, - "é¢ĺ": 30716, - "λά": 30717, - "Ġdedicate": 30718, - "ĠLob": 30719, - "Ġblacks": 30720, - "ĠBle": 30721, - "bout": 30722, - "ĠÄijang": 30723, - "Ġeks": 30724, - "Ġsquash": 30725, - "ĠKü": 30726, - "odi": 30727, - "ĠnÆ°á»Ľc": 30728, - "Ġvoyage": 30729, - "Ġplayful": 30730, - "ĠØ¥ÙĦÙī": 30731, - "anic": 30732, - "Ġcondemn": 30733, - "ĠBöyle": 30734, - "ĠPolize": 30735, - "ãĤ¿ãĥ¼": 30736, - "Ġayuda": 30737, - "Ġpam": 30738, - "à¹Ħà¸Ľ": 30739, - "ĠKathy": 30740, - "един": 30741, - "нова": 30742, - "Ġbrig": 30743, - "eger": 30744, - "Ġeagle": 30745, - "Ġvisions": 30746, - "ĠíķŃìĥģ": 30747, - "Ġshitty": 30748, - "Ġhott": 30749, - "ĠBritt": 30750, - "utors": 30751, - "ENTE": 30752, - "æĽ²": 30753, - "Ġphon": 30754, - "ĠBing": 30755, - "ĠподдеÑĢж": 30756, - "spring": 30757, - "æĸ¯": 30758, - "etten": 30759, - "Ġpilgr": 30760, - "Ġediyor": 30761, - "енÑĤÑĭ": 30762, - "aggio": 30763, - "Ġjul": 30764, - "Ġcomprend": 30765, - "teil": 30766, - "Ġز": 30767, - "Ġperformers": 30768, - "Ġinfamous": 30769, - "ĠMK": 30770, - "çª": 30771, - "æ³ģ": 30772, - "otle": 30773, - "eff": 30774, - "ĠHash": 30775, - "Ġcoward": 30776, - "ĠBRA": 30777, - "ĠDD": 30778, - "Ġcomida": 30779, - "Ġplata": 30780, - "Ġflap": 30781, - "ĠMehr": 30782, - "ribution": 30783, - "ĠYemen": 30784, - "Ġmysteries": 30785, - "ĠÄ°yi": 30786, - "Ġstell": 30787, - "Ġeyeliner": 30788, - "Ġdeles": 30789, - "Ġnailed": 30790, - "Ġillnesses": 30791, - "Ġstacks": 30792, - "Ġtrabajar": 30793, - "flower": 30794, - "ciu": 30795, - "Ġcrude": 30796, - "Ġsubstantially": 30797, - "Ġhomem": 30798, - "Ġnephew": 30799, - "Ġstamps": 30800, - "Ġcarbs": 30801, - "ÑĮÑĤе": 30802, - "mooth": 30803, - "Ġtunnels": 30804, - "acie": 30805, - "æ³¢": 30806, - "ĠSeñ": 30807, - "ĠHera": 30808, - "ĠìķĦëĭĪìĹIJìļĶ": 30809, - "ĠWyoming": 30810, - "ĠHDMI": 30811, - "ĠLis": 
30812, - "ución": 30813, - "Ġsteer": 30814, - "оÑİ": 30815, - "иÑĤа": 30816, - "NT": 30817, - "Ġìĸ¼êµ´": 30818, - "Ġpalms": 30819, - "Ġneon": 30820, - "ованиÑı": 30821, - "Ġfiltering": 30822, - "Ġjouer": 30823, - "ĠHö": 30824, - "ĠнеÑģ": 30825, - "ê²łìĸ´ìļĶ": 30826, - "Ġ81": 30827, - "Ġstoryline": 30828, - "Ġprzep": 30829, - "Ġthanking": 30830, - "ĠBoeing": 30831, - "Ġsoftly": 30832, - "jem": 30833, - "алÑĮнÑĭÑħ": 30834, - "Ġflashlight": 30835, - "ĠпÑĥ": 30836, - "ĠWOMAN": 30837, - "ắc": 30838, - "ÃŃch": 30839, - "Ġluxurious": 30840, - "Ġwün": 30841, - "Ġimpactful": 30842, - "Ġconson": 30843, - "reu": 30844, - "irring": 30845, - "ifter": 30846, - "Ġconstituents": 30847, - "èIJ½": 30848, - "Ġ94": 30849, - "ĠTou": 30850, - "gom": 30851, - "ĠìĥĿê°ģìĿĦ": 30852, - "Ġstereotypes": 30853, - "Ġmożli": 30854, - "åĪĨ享": 30855, - "Ĥ¨": 30856, - "Ġpencils": 30857, - "ĠÑģлож": 30858, - "Ġihrem": 30859, - "ĠBesch": 30860, - "ĠKoh": 30861, - "ĠEntscheid": 30862, - "Ġlek": 30863, - "Ġförs": 30864, - "Ġtotalmente": 30865, - "Ġlively": 30866, - "Ġentropy": 30867, - "Ġdiscern": 30868, - "ĠÐĹна": 30869, - "Ġdov": 30870, - "Ġmythology": 30871, - "è¨ĺå¾Ĺ": 30872, - "apanese": 30873, - "Ġapproximate": 30874, - "аÑĤив": 30875, - "ifiable": 30876, - "ĠSeo": 30877, - "åĢĴ": 30878, - "´ìĭ¬íŀĪ": 30879, - "Ġìĺ·": 30880, - "Ġtemporal": 30881, - "ĠiT": 30882, - "Ġestat": 30883, - "ким": 30884, - "Ġsprink": 30885, - "Ġgrund": 30886, - "Ġinfantry": 30887, - "Ġschaffen": 30888, - "ç´Ħ": 30889, - "Ġank": 30890, - "riages": 30891, - "ĠYeon": 30892, - "ĠMoroc": 30893, - "Ġinvasive": 30894, - "ģĶ": 30895, - "Ġparenting": 30896, - "ĠRis": 30897, - "ibile": 30898, - "Ġmods": 30899, - "å½¢": 30900, - "ĠпÑĢовеÑĢ": 30901, - "ĠThing": 30902, - "ĠWherever": 30903, - "Ġacknowledging": 30904, - "Ġpawn": 30905, - "ummer": 30906, - "orb": 30907, - "69": 30908, - "Ġretrouve": 30909, - "Ġrelies": 30910, - "ĠHighway": 30911, - "Ġawe": 30912, - "ãģ§ãģĻãģĭ": 30913, - "itaire": 30914, - "Ġapplicant": 30915, - "Ġaisle": 30916, - "worm": 30917, - "Ġpayload": 30918, - "Ġcarre": 30919, - "ĠBach": 30920, - "æł¼": 30921, - "Ġì¹ľêµ¬ë": 30922, - "ние": 30923, - "ĠitÃŃs": 30924, - "onnaise": 30925, - "sol": 30926, - "èı¯": 30927, - "algia": 30928, - "Ġrocking": 30929, - "Ġbesten": 30930, - "rites": 30931, - "^^": 30932, - "иной": 30933, - "Ġbaixo": 30934, - "Ġ기ìĸµ": 30935, - "оÑĤÑĢи": 30936, - "sim": 30937, - "Ġincarn": 30938, - "ëĭ¤ìĿĮ": 30939, - "Ġlick": 30940, - "sided": 30941, - "Ġ71": 30942, - "forder": 30943, - "Ġresonance": 30944, - "Ġtegen": 30945, - "Ġmetaph": 30946, - "owser": 30947, - "Ġ×IJ׳×Ĺ׳×ķ": 30948, - "?ãĢį": 30949, - "Ġspielen": 30950, - "Ġvolley": 30951, - "ĶìĿ´íģ¬ìĹħ": 30952, - "looked": 30953, - "Ġsentenced": 30954, - "Ġmultiplying": 30955, - "Ġideals": 30956, - "Ġwahrscheinlich": 30957, - "Ġdeposits": 30958, - "bilir": 30959, - "Ġeffet": 30960, - "illon": 30961, - "Īë§Į": 30962, - "Ġtestimon": 30963, - "Ġzawsze": 30964, - "ĠпÑĢоÑĨеÑģÑģ": 30965, - "ĠLav": 30966, - "ä¸įéĮ¯": 30967, - "Ġtravailler": 30968, - "Ġlaisse": 30969, - "ĠMountains": 30970, - "ĠÑĢоб": 30971, - "Ġexamined": 30972, - "itus": 30973, - "Was": 30974, - "лÑĭ": 30975, - "Ġattributed": 30976, - "ĠìĬ¹": 30977, - "ĠBaron": 30978, - "Ġgep": 30979, - "Ġattent": 30980, - "ĠCollection": 30981, - "Ġtheat": 30982, - "ĠCai": 30983, - "Ġwells": 30984, - "Ġhumano": 30985, - "çĹħ": 30986, - "ĠHast": 30987, - "ĠÑħоÑĤÑı": 30988, - "czas": 30989, - "Ġpermits": 30990, - "Ġlegg": 30991, - "Ġepo": 30992, - "ĠFen": 30993, - "Ġthi": 30994, - "ĠFoi": 30995, - "Ġélect": 30996, - "Ġ83": 
30997, - "Ġoverth": 30998, - "Ġè¬Ŀè¬Ŀ": 30999, - "Ġtenant": 31000, - "è²·": 31001, - "Next": 31002, - "Ġpraised": 31003, - "security": 31004, - "ĠImpact": 31005, - "为ä»Ģä¹Ī": 31006, - "Ġvouch": 31007, - "Ġnegó": 31008, - "Ġunve": 31009, - "Ġcriticize": 31010, - "ĠKenya": 31011, - "Ġtactic": 31012, - "Ġlogr": 31013, - "Ġpois": 31014, - "Ġpapa": 31015, - "speaks": 31016, - "ðŁij": 31017, - "ispers": 31018, - "Ġsurplus": 31019, - "Ġcolder": 31020, - "åįĹ": 31021, - "åIJ¬": 31022, - "plets": 31023, - "ĠVienna": 31024, - "ĠLead": 31025, - "Ġaerial": 31026, - "ĠTah": 31027, - "енÑĤов": 31028, - "ĠGreeks": 31029, - "Cam": 31030, - "Ġmáxim": 31031, - "Ġkuin": 31032, - "chio": 31033, - "Ġdemonstrates": 31034, - "anos": 31035, - "ĠCert": 31036, - "ĠÑįн": 31037, - "Ġblogs": 31038, - "ĠìĦľìļ¸": 31039, - "Ġbeams": 31040, - "иков": 31041, - "Ġprompted": 31042, - "Ġfrightening": 31043, - "ĠPorsche": 31044, - "ãģĪãģ¦": 31045, - "larını": 31046, - "Ġchilling": 31047, - "isphere": 31048, - "Ġflashing": 31049, - "ĠKard": 31050, - "bread": 31051, - "Ġexh": 31052, - "Ġtycker": 31053, - "Ġecological": 31054, - "ĠMae": 31055, - "Ġ×ŀ×IJ×ķ×ĵ": 31056, - "ĠëĤĺëıĦ": 31057, - "лон": 31058, - "yss": 31059, - "Ġpergunt": 31060, - "Ġprix": 31061, - "izzard": 31062, - "Ġcancers": 31063, - "Ġ91": 31064, - "susp": 31065, - "ĠItem": 31066, - "ÅŁa": 31067, - "Ġpest": 31068, - "ĠtakÄħ": 31069, - "Ġlymph": 31070, - "ĠPatri": 31071, - "fill": 31072, - "Ġreconna": 31073, - "Ġoptimism": 31074, - "Ġmimic": 31075, - "Ġì²ľ": 31076, - "ĠMadame": 31077, - "ocy": 31078, - "lining": 31079, - "åijĬ訴": 31080, - "erme": 31081, - "Ġfolders": 31082, - "ĠczÅĤ": 31083, - "uchar": 31084, - "Ġcurso": 31085, - "Ġbreach": 31086, - "ниÑĤÑĮ": 31087, - "ĠpamiÄĻ": 31088, - "Ġelig": 31089, - "Ġautop": 31090, - "Flow": 31091, - "Ġprogrammed": 31092, - "ĠProcess": 31093, - "Ġfigur": 31094, - "ĠSF": 31095, - "ĠEles": 31096, - "Ġprogrammes": 31097, - "Ġdizzy": 31098, - "ìĭľê°Ħ": 31099, - "Ġлибо": 31100, - "Ġsniff": 31101, - "ĠSebastian": 31102, - "ĠHye": 31103, - "Ġ4000": 31104, - "Ġpermite": 31105, - "æ¢Ŀ": 31106, - "ĠзаÑī": 31107, - "Ġguit": 31108, - "ĠDais": 31109, - "Ġaccordance": 31110, - "Ġmodular": 31111, - "ogeneous": 31112, - "æĭį": 31113, - "Ġpouquinho": 31114, - "Ġartillery": 31115, - "Ġlubric": 31116, - "Ġvolcan": 31117, - "ĠNH": 31118, - "ðŁ¤": 31119, - "Ġdean": 31120, - "Rh": 31121, - "Ġministre": 31122, - "åĿIJ": 31123, - "ĠInv": 31124, - "ĠBulgar": 31125, - "ĠDaten": 31126, - "èİ": 31127, - "Im": 31128, - "Ġoriginated": 31129, - "ĠNixon": 31130, - "integr": 31131, - "Ġlacks": 31132, - "ĠNacht": 31133, - "ìĸ´ëĤĺ": 31134, - "camera": 31135, - "Ġradish": 31136, - "kiye": 31137, - "Ġanges": 31138, - "Ġpréf": 31139, - "juk": 31140, - "ĠBee": 31141, - "ĠBU": 31142, - "ĠвоÑģп": 31143, - "ĠBT": 31144, - "êmes": 31145, - "ĠStück": 31146, - "ĠInk": 31147, - "æĪĸèĢħ": 31148, - "ĠSergeant": 31149, - "ĠMultip": 31150, - "Ġhiçbir": 31151, - "ĠСам": 31152, - "ĠDé": 31153, - "olph": 31154, - "ìĸ¸": 31155, - "Ġimpat": 31156, - "ĠìķĬê³ł": 31157, - "ĠÑĤакого": 31158, - "ĠнавеÑĢное": 31159, - "Ġunpredictable": 31160, - "Ġmend": 31161, - "ĠìĹĨìĸ´ìļĶ": 31162, - "ĠjakieÅĽ": 31163, - "Ġanni": 31164, - "Ġdonné": 31165, - "ĠKirsty": 31166, - "Ġrectangular": 31167, - "Ġempezar": 31168, - "ĠExchange": 31169, - "ê°Ķ": 31170, - "Ġéconom": 31171, - "ãģĵãĤĵ": 31172, - "elin": 31173, - "reibt": 31174, - "Ġ×Ķפ": 31175, - "Ġcemetery": 31176, - "Ġespañol": 31177, - "olin": 31178, - "лÑİд": 31179, - "Ġgrâce": 31180, - "allen": 31181, - "ĠPhilos": 31182, - "ĠErst": 31183, - 
"ĠìĥĪ": 31184, - "ĠVid": 31185, - "Give": 31186, - "OH": 31187, - "μο": 31188, - "ĠPare": 31189, - "Ġmetabolism": 31190, - "Ġmaple": 31191, - "Ġaxle": 31192, - "ĠDy": 31193, - "Ġkomme": 31194, - "Ïİν": 31195, - "Ġgreatness": 31196, - "Ġverified": 31197, - "Ġspé": 31198, - "ĠFahrenheit": 31199, - "ĠBren": 31200, - "ĠConfeder": 31201, - "Ġhistoire": 31202, - "Ġeliminating": 31203, - "ĠAdding": 31204, - "ĠAbi": 31205, - "æĿİ": 31206, - "Ġhospitality": 31207, - "tim": 31208, - "Ġbonito": 31209, - "Ġpartes": 31210, - "ĠдÑĢÑĥгиÑħ": 31211, - "ĠShay": 31212, - "ĠSed": 31213, - "Ġregrets": 31214, - "Ñıми": 31215, - "Ġtenants": 31216, - "éĢŁ": 31217, - "ĠPTS": 31218, - "Ġdevi": 31219, - "ĠLate": 31220, - "uez": 31221, - "Ġsöyl": 31222, - "ãĤ»": 31223, - "Ġìŀ¬ë°Į": 31224, - "Ġtoggle": 31225, - "Ġmasking": 31226, - "алÑĮного": 31227, - "Ġpersön": 31228, - "Ġamerican": 31229, - "fik": 31230, - "ĠRGB": 31231, - "enson": 31232, - "ĠKA": 31233, - "wwww": 31234, - "ĠÑĢег": 31235, - "metics": 31236, - "Ġeducator": 31237, - "ãĤ·ãĥ«ãĤ¯": 31238, - "park": 31239, - "елÑĮзÑı": 31240, - "arus": 31241, - "ÑĢеÑĤ": 31242, - "Ġfeito": 31243, - "Ġchoir": 31244, - "Ġlargo": 31245, - "Ġeens": 31246, - "Ġwatts": 31247, - "ĠSingle": 31248, - "Ġsusceptible": 31249, - "icer": 31250, - "ĠвклÑİÑĩ": 31251, - "Ġpus": 31252, - "íĻĺ": 31253, - "Eng": 31254, - "Ġfantas": 31255, - "Ġspecification": 31256, - "Ġconfronted": 31257, - "ĠColumbus": 31258, - "ивеÑĤ": 31259, - "arım": 31260, - "Ġcaffeine": 31261, - "munition": 31262, - "Ġmigrants": 31263, - "lide": 31264, - "itations": 31265, - "ĠGeme": 31266, - "ẫ": 31267, - "Ġplanner": 31268, - "Ġstimulate": 31269, - "Ġaproxim": 31270, - "ceu": 31271, - "ĠNom": 31272, - "Ġvog": 31273, - "ĠÑĢаÑģÑĤ": 31274, - "Ġenseñ": 31275, - "Ġsellers": 31276, - "Ġguten": 31277, - "zd": 31278, - "Cal": 31279, - "Ġdescript": 31280, - "Ġreconciliation": 31281, - "zinho": 31282, - "á¹ĩa": 31283, - "ãģĺãĤĥãģĤ": 31284, - "acyj": 31285, - "ĠCOL": 31286, - "saw": 31287, - "ĠíĻķìĿ¸": 31288, - "Ġvarit": 31289, - "Ġpartnering": 31290, - "Ġdetention": 31291, - "Ġbombing": 31292, - "clapping": 31293, - "iencies": 31294, - "ondu": 31295, - "AME": 31296, - "Ġê°ĻìĬµëĭĪëĭ¤": 31297, - "cÃŃa": 31298, - "ĠпоÑģÑĤо": 31299, - "ĠASMR": 31300, - "Ġhomepage": 31301, - "Ġsiè": 31302, - "antha": 31303, - "ĠPoll": 31304, - "Ġigen": 31305, - "cych": 31306, - "Ġê°ijìŀIJ기": 31307, - "Ġconsiderably": 31308, - "ä»ĸçļĦ": 31309, - "ĠArist": 31310, - "Ġwithstand": 31311, - "Ġqualitative": 31312, - "ĠKraft": 31313, - "ĠÑįлекÑĤ": 31314, - "ĠBead": 31315, - "екÑĤив": 31316, - "Ġcrushing": 31317, - "ì³IJ": 31318, - "Ġnavy": 31319, - "ÙĪÚº": 31320, - "sho": 31321, - "Ġoak": 31322, - "ippers": 31323, - "Ġsoils": 31324, - "Ġpigment": 31325, - "Ġevitar": 31326, - "ãĥĩ": 31327, - "Ġfuse": 31328, - "ĠDale": 31329, - ":\"": 31330, - "Ġcomplètement": 31331, - "Ġkel": 31332, - "à¹Ĩ": 31333, - "Ġquatre": 31334, - "ĠUM": 31335, - "Ġë§IJë": 31336, - "æł¹": 31337, - "ÃŃr": 31338, - "Ġleisure": 31339, - "ĠHousing": 31340, - "Ġfolds": 31341, - "estion": 31342, - "ARS": 31343, - "Ġmash": 31344, - "urpose": 31345, - "Ġaccumulated": 31346, - "ĠStuff": 31347, - "èªŀ": 31348, - "Ġtapes": 31349, - "ĠÑģилÑĮно": 31350, - "ĠLOVE": 31351, - "Ġ1982": 31352, - "Ġscars": 31353, - "Ġcapitalist": 31354, - "ĠNed": 31355, - "Ġsoften": 31356, - "Ġnotably": 31357, - "Ġforcément": 31358, - "ĠRaum": 31359, - "ĠнеобÑħод": 31360, - "Ġtrademark": 31361, - "Ġfertig": 31362, - "Ġ?!": 31363, - "æĹł": 31364, - "Ġreinforced": 31365, - "Ġrecharge": 31366, - "ĠPutting": 31367, - 
"Ġvillains": 31368, - "Ġhandic": 31369, - "Ġadvertisement": 31370, - "تÙĬ": 31371, - "ĠÑģÑĥм": 31372, - "ĠRiley": 31373, - "×ķ×ij×": 31374, - "京": 31375, - "Os": 31376, - "از": 31377, - "Boy": 31378, - "Ġsquish": 31379, - "ocket": 31380, - "Ġtestify": 31381, - "æ¼Ķ": 31382, - "Ġ׾×ŀ×": 31383, - "ĠмаÑģÑģ": 31384, - "manuel": 31385, - "ĠArkansas": 31386, - "iffe": 31387, - "Ġanalysts": 31388, - "ĠDeaf": 31389, - "Ġjó": 31390, - "Ġgroceries": 31391, - "ĠWheel": 31392, - "ĠÑĢиÑģ": 31393, - "Ġcòn": 31394, - "ĠCob": 31395, - "Ġprisons": 31396, - "ève": 31397, - "ĠCabinet": 31398, - "Ġposed": 31399, - "Ġguerre": 31400, - "ĠLloyd": 31401, - "Ġclerk": 31402, - "Ġcrises": 31403, - "ĠSho": 31404, - "ĠOre": 31405, - "ĠFootball": 31406, - "ĠAdvis": 31407, - "ĠZheng": 31408, - "èį": 31409, - "ĠAMY": 31410, - "Ġunfor": 31411, - "Ġmonaster": 31412, - "Ġcompile": 31413, - "Ġimmortal": 31414, - "atable": 31415, - "Ġparano": 31416, - "Ġtiver": 31417, - "ĠSteph": 31418, - "ĠFuÃŁ": 31419, - "Ġdiscontin": 31420, - "Ġripe": 31421, - "Ġhacking": 31422, - "Ġsiendo": 31423, - "Ġseguro": 31424, - "altres": 31425, - "Ġanderes": 31426, - "Ġ리ë": 31427, - "Ġexports": 31428, - "æŃ¥": 31429, - "Ġtabii": 31430, - "Ġ기ëĭ¤ë": 31431, - "Ġbothering": 31432, - "Ġpickle": 31433, - "ĠBRIAN": 31434, - "Ġaltar": 31435, - "ĠпÑĢиб": 31436, - "Ġtransferring": 31437, - "ĠVors": 31438, - "ĠÙĩÙĪ": 31439, - "ĠZa": 31440, - "ĠFrances": 31441, - "Ġbrowse": 31442, - "emit": 31443, - "Ġchewing": 31444, - "ĠFreddy": 31445, - "Ġeditors": 31446, - "älle": 31447, - "ĠíĮĢ": 31448, - "ĠSque": 31449, - "ĠCultural": 31450, - "awk": 31451, - "ĠSache": 31452, - "ĠCarbon": 31453, - "ắt": 31454, - "FL": 31455, - "ĠNGO": 31456, - "peÅĤ": 31457, - "ĠSou": 31458, - "Ġhvor": 31459, - "unintelligible": 31460, - "Ġë²ķ": 31461, - "Ġ°": 31462, - "iin": 31463, - "Ġ×¢×Ŀ": 31464, - "Ġderrière": 31465, - "Ġczym": 31466, - "ĠApost": 31467, - "Ġregarder": 31468, - "Ġagrade": 31469, - "ĠCandy": 31470, - "Ġmare": 31471, - "Ġintroduces": 31472, - "birds": 31473, - "Ġuniquely": 31474, - "Ġmuk": 31475, - "Ġcooker": 31476, - "Ġcrews": 31477, - "Ġjeito": 31478, - "ERT": 31479, - "¶Ħë": 31480, - "nisse": 31481, - "Ġef": 31482, - "Ġcarte": 31483, - "ĠYak": 31484, - "ĠPAT": 31485, - "ино": 31486, - "bokki": 31487, - "Ġmates": 31488, - "Ġdistint": 31489, - "Ġì½Ķë¡ľëĤĺ": 31490, - "Ġyıl": 31491, - "Ġκάν": 31492, - "Ġconfigurations": 31493, - "enga": 31494, - "recht": 31495, - "Happy": 31496, - "ãĤĦãģ£ãģ¦": 31497, - "invest": 31498, - "Ġreconstruct": 31499, - "ĠÑįÑĤомÑĥ": 31500, - "Ġmosque": 31501, - "raum": 31502, - "Ġvoyez": 31503, - "ĠNBC": 31504, - "ĠìŀIJìĭł": 31505, - "Ġsturdy": 31506, - "Ġкап": 31507, - "Ġansch": 31508, - "alid": 31509, - "Ġmasih": 31510, - "ĠREP": 31511, - "Ġì½Ķë": 31512, - "Ġdeduct": 31513, - "Ġsalir": 31514, - "wurf": 31515, - "ilot": 31516, - "ĠMutter": 31517, - "olds": 31518, - "ĠFEMA": 31519, - "ĠBib": 31520, - "Ġneighboring": 31521, - "Ġbliss": 31522, - "Ġíĺ¼": 31523, - "лиÑģÑĮ": 31524, - "ĠÑĤÑĢеб": 31525, - "Ġå°±æĺ¯": 31526, - "Ġgrenade": 31527, - "Ġegal": 31528, - "Ġfinely": 31529, - "Ġpetals": 31530, - "Ġkeer": 31531, - "Ġchyba": 31532, - "Ġskipping": 31533, - "Ġthirteen": 31534, - "Ġgravy": 31535, - "ĠSAT": 31536, - "61": 31537, - "Ġног": 31538, - "Ġmins": 31539, - "ITE": 31540, - "Ġsozial": 31541, - "íķĺë©´ìĦľ": 31542, - "ruktur": 31543, - "Ġвозмож": 31544, - "ĠопÑıÑĤÑĮ": 31545, - "Ġarth": 31546, - "ĠCuban": 31547, - "Ġtreasures": 31548, - "Ġfertilizer": 31549, - "Ġawakening": 31550, - "Ġë°±ìĭł": 31551, - "Ġrall": 31552, - "Ġdepict": 31553, - 
"ĠPablo": 31554, - "Ġnineteen": 31555, - "Ġwatt": 31556, - "Ġentirety": 31557, - "KS": 31558, - "ĠWoods": 31559, - "Sch": 31560, - "ĠÚ©ÙĪ": 31561, - "ĠDry": 31562, - "ãģŀ": 31563, - "uve": 31564, - "Ġreconstruction": 31565, - "Ġanatomy": 31566, - "Ī를": 31567, - "Ġbaba": 31568, - "Ġlistener": 31569, - "Ġsharpen": 31570, - "ĠPeru": 31571, - "ĠвÑĭз": 31572, - "Ġrecreation": 31573, - "Ġinitiate": 31574, - "Ġcalor": 31575, - "ĠNaj": 31576, - "gee": 31577, - "ĠFeels": 31578, - "ĠSnapchat": 31579, - "ĠTet": 31580, - "ĠNest": 31581, - "ĠDaf": 31582, - "ĠFinish": 31583, - "ĠÑĤаким": 31584, - "úc": 31585, - "izens": 31586, - "Ġspins": 31587, - "Ġembry": 31588, - "Ġpassages": 31589, - "Ġcient": 31590, - "Ġjustification": 31591, - "ä»ĸ說": 31592, - "Ġolmaz": 31593, - "Ġflooded": 31594, - "Ġemoji": 31595, - "Ġembracing": 31596, - "Ġdiscard": 31597, - "ĠBasic": 31598, - "agog": 31599, - "ĠìľĦíķ´": 31600, - "Ġasylum": 31601, - "erin": 31602, - "Ġfim": 31603, - "Ġninja": 31604, - "Ġautomate": 31605, - "Ġallergic": 31606, - "ÿÿÿÿ": 31607, - "amam": 31608, - "ĠмаÑĢ": 31609, - "ĠOi": 31610, - "äus": 31611, - "Ġinduct": 31612, - "ĠBEN": 31613, - "ĠzÅĤ": 31614, - "Ġkażdy": 31615, - "ĠAMP": 31616, - "nÄĽ": 31617, - "Sure": 31618, - "Ġquil": 31619, - "Ġespec": 31620, - "rok": 31621, - "BSCRI": 31622, - "Ġliebe": 31623, - "pus": 31624, - "achsen": 31625, - "Ġcricket": 31626, - "ëĬIJ": 31627, - "ĠFrame": 31628, - "ekkür": 31629, - "arb": 31630, - "ĠpÅĻ": 31631, - "иÑģÑģ": 31632, - "Ġzeggen": 31633, - "Ġdoubles": 31634, - "ĠDre": 31635, - "test": 31636, - "insp": 31637, - "boys": 31638, - "Ġmão": 31639, - "ĠVerse": 31640, - "Ġmuscular": 31641, - "ĠMALE": 31642, - "Ġdulu": 31643, - "Ġoccasional": 31644, - "Lo": 31645, - "conomic": 31646, - "Ġvak": 31647, - "Ġremedy": 31648, - "å¤ł": 31649, - "ĠâĻªâĻªâĻª": 31650, - "vem": 31651, - "Ġönem": 31652, - "ĠkarÅŁÄ±": 31653, - "ĠSharp": 31654, - "hur": 31655, - "Ġë°©ë²ķ": 31656, - "Ġgrandson": 31657, - "Ġaktiv": 31658, - "ĠThrones": 31659, - "ĠìķĪìĹIJ": 31660, - "Ġtots": 31661, - "Ġsubd": 31662, - "ĠPaula": 31663, - "Ġgraves": 31664, - "ĠBrent": 31665, - "ĠникÑĤо": 31666, - "Ġsöz": 31667, - "Ġcrec": 31668, - "ĠVladimir": 31669, - "çĸ«": 31670, - "Ġпой": 31671, - "Ġ\"-": 31672, - "Ġpsy": 31673, - "atri": 31674, - "idan": 31675, - "Ġaún": 31676, - "Ġstandardized": 31677, - "ì¹ĺë": 31678, - "ĠкÑĢов": 31679, - "ĠZhu": 31680, - "something": 31681, - "Ġ750": 31682, - "Ġmujeres": 31683, - "Ġait": 31684, - "éĹ´": 31685, - "agu": 31686, - "Ġcorrected": 31687, - "ikka": 31688, - "eled": 31689, - "ĠCareer": 31690, - "owym": 31691, - "Ġroommate": 31692, - "Ġdescendants": 31693, - "ĠNapoleon": 31694, - "ĠÐĶо": 31695, - "íĸĪìĸ´ìļĶ": 31696, - "Ġbunun": 31697, - "ĠMicha": 31698, - "ç·ļ": 31699, - "Ġdescob": 31700, - "PI": 31701, - "Ġpalabra": 31702, - "Ġtracked": 31703, - "Ġdependence": 31704, - "ĠBarack": 31705, - "åģĩ": 31706, - "Ġfertility": 31707, - "ĠSouthwest": 31708, - "Ġincomplete": 31709, - "Ġcomunic": 31710, - "Ġcompris": 31711, - "ĠRestaur": 31712, - "Ġacron": 31713, - "κα": 31714, - "Ġapprentices": 31715, - "Ġmusst": 31716, - "ĠAbr": 31717, - "Ġpentru": 31718, - "ĠConsort": 31719, - "ĠAvec": 31720, - "Ġdumplings": 31721, - "LR": 31722, - "Ġwszystkie": 31723, - "Ġswamp": 31724, - "нев": 31725, - "uggle": 31726, - "Ġwatercolor": 31727, - "Ġproton": 31728, - "ĠEspaña": 31729, - "ocking": 31730, - "овал": 31731, - "Ġtakim": 31732, - "Very": 31733, - "Ġdementia": 31734, - "ĠÅŁeyi": 31735, - "Jac": 31736, - "ĠMacBook": 31737, - "ĠLiv": 31738, - "fficients": 31739, - "ĠHunt": 31740, - 
"Ġoverlay": 31741, - "æĦŁè¦º": 31742, - "ĠSkype": 31743, - "punkt": 31744, - "Ġconfined": 31745, - "ĠAdrian": 31746, - "رÙĥ": 31747, - "ĠJeep": 31748, - "Ġenquanto": 31749, - "Ġanest": 31750, - "оÑĤвеÑĤ": 31751, - "ĠменÑĮ": 31752, - "Ġirrigation": 31753, - "á»ijn": 31754, - "Ġeighteen": 31755, - "ĠPon": 31756, - "Ġrescued": 31757, - "Ġ1983": 31758, - "rü": 31759, - "jae": 31760, - "ĠJeong": 31761, - "Ġamazingly": 31762, - "ĠFDP": 31763, - "Ġbackstage": 31764, - "cue": 31765, - "ĠÏĥÏĦην": 31766, - "ĠاÙĦص": 31767, - "Ġlivestock": 31768, - "ĠWarner": 31769, - "Ġmajors": 31770, - "ãĥģãĥ£": 31771, - "Ġcooperative": 31772, - "ĠBrady": 31773, - "rained": 31774, - "rieb": 31775, - "Ġ×ij×ŀ×": 31776, - "ĠдоволÑĮно": 31777, - "ĠFE": 31778, - "Ġleaked": 31779, - "ĠMercury": 31780, - "Ġpersuade": 31781, - "Ġtransformer": 31782, - "ĠNorweg": 31783, - "ĠìŬ룬": 31784, - "ĠzrobiÄĩ": 31785, - "Ġcardiovascular": 31786, - "ĠCrash": 31787, - "Ġgossip": 31788, - "аÑģÑĤÑĮ": 31789, - "Ġ쪽": 31790, - "Ġswept": 31791, - "ĠHorn": 31792, - "ĠAté": 31793, - "Ġbukan": 31794, - "ĠKaw": 31795, - "KY": 31796, - "ĠStories": 31797, - "Gary": 31798, - "Ġgardening": 31799, - "ĠQuickly": 31800, - "ĠFalcon": 31801, - "Ġovat": 31802, - "cı": 31803, - "ĠComplet": 31804, - "ĠDate": 31805, - "ĠпÑĢим": 31806, - "Ġläuft": 31807, - "ĠAudrey": 31808, - "ĠWent": 31809, - "ĠpelÃŃcul": 31810, - "Ġcarriage": 31811, - "Ġunacceptable": 31812, - "nymi": 31813, - "ĠÑģлÑĭÑĪ": 31814, - "Ġterre": 31815, - "uellement": 31816, - "EEEE": 31817, - "Ġpharmac": 31818, - "hões": 31819, - "Ġzich": 31820, - "Ġmigrate": 31821, - "ĠFry": 31822, - "ñana": 31823, - "ĠMuito": 31824, - "EOVER": 31825, - "Ġfortress": 31826, - "ĠCompan": 31827, - "ĠJSON": 31828, - "ordnung": 31829, - "Ġwarto": 31830, - "Ġungef": 31831, - "ìħĶìĦľ": 31832, - "ĠÑĢок": 31833, - "Ġpaddle": 31834, - "Jared": 31835, - "Ġsubmitting": 31836, - "Ġlatch": 31837, - "Ġfug": 31838, - "ĠкоÑģ": 31839, - "ĠEf": 31840, - "Ġlaunches": 31841, - "Ġft": 31842, - "otechn": 31843, - "Ġtravelled": 31844, - "اÙģ": 31845, - "éģķ": 31846, - "Ġproch": 31847, - "Ġdedim": 31848, - "83": 31849, - "Ġrebound": 31850, - "ĠLU": 31851, - "path": 31852, - "ĠÑģпÑĢав": 31853, - "Ġöl": 31854, - "ĠíĤ¤": 31855, - "Ġprivat": 31856, - "Ġtractor": 31857, - "ĠAttention": 31858, - "Ser": 31859, - "Ġcoses": 31860, - "ária": 31861, - "pal": 31862, - "ĠìĿĢ": 31863, - "Ġsuccessor": 31864, - "Ġconnectors": 31865, - "ĠÑĥÑģÑĤанов": 31866, - "Ġgenocide": 31867, - "Ġsufficiently": 31868, - "ĠAixò": 31869, - "Ġstabilize": 31870, - "Ġcongest": 31871, - "Ġcarving": 31872, - "Ġzost": 31873, - "ĠбÑĭÑģÑĤÑĢо": 31874, - "Ġshortest": 31875, - "Ġlivel": 31876, - "Ġ89": 31877, - "éģĬ": 31878, - "Ġerk": 31879, - "Ġportraits": 31880, - "à¥Ģ": 31881, - "èĺ": 31882, - "boat": 31883, - "llah": 31884, - "ANC": 31885, - "Ġempirical": 31886, - "ĠEcho": 31887, - "ĠNederland": 31888, - "è¿Ļä¹Ī": 31889, - "Net": 31890, - "Ġcuidado": 31891, - "ĠRoma": 31892, - "Ġcalf": 31893, - "Ġgiants": 31894, - "ĠExplorer": 31895, - "ĠCollect": 31896, - "alition": 31897, - "ĠDestiny": 31898, - "Ġausge": 31899, - "ĠEdu": 31900, - "ĠClo": 31901, - "Ġearrings": 31902, - "ĠTrack": 31903, - "ĠROS": 31904, - "ĠBelle": 31905, - "çĻ¾": 31906, - "Ġpueda": 31907, - "Ġdaytime": 31908, - "Ġsupplier": 31909, - "ĠSV": 31910, - "ĠExhale": 31911, - "Ġgalera": 31912, - "course": 31913, - "Ġcentimeter": 31914, - "ĠBast": 31915, - "mud": 31916, - "Ġsangat": 31917, - "ĠPhysical": 31918, - "Ġprivately": 31919, - "Ġtrata": 31920, - "lynn": 31921, - "illi": 31922, - "Ġë©ĶìĿ´íģ¬ìĹħ": 31923, - 
"Ġcrystall": 31924, - "Ġpods": 31925, - "ản": 31926, - "inator": 31927, - "ĠRecords": 31928, - "å®ĺ": 31929, - "ÄŁimiz": 31930, - "issement": 31931, - "hare": 31932, - "hadow": 31933, - "ĠDK": 31934, - "ĠìķĮê³ł": 31935, - "Ġwyn": 31936, - "Ġrequesting": 31937, - "ĠDonna": 31938, - "ĠìĹ´ìĭ¬íŀĪ": 31939, - "inea": 31940, - "Ġexert": 31941, - "ĠDuncan": 31942, - "ĠвеÑĩ": 31943, - "ĠHah": 31944, - "à¤Ĥ": 31945, - "ĠLif": 31946, - "ĠFinding": 31947, - "ĠNov": 31948, - "Ġзнак": 31949, - "ĠоÑĦ": 31950, - "ĠQuè": 31951, - "Ġquarterback": 31952, - "ĠÑĦак": 31953, - "Ġbipartisan": 31954, - "ÄŁin": 31955, - "Ġnécess": 31956, - "Ġreferendum": 31957, - "Ġcompiler": 31958, - "Ġprobabil": 31959, - "еди": 31960, - "Ġtrader": 31961, - "æĺĵ": 31962, - "ĠRum": 31963, - "geme": 31964, - "Ġdio": 31965, - "ĠbÄĻdziemy": 31966, - "ĠÏĢά": 31967, - "꾸": 31968, - "×ķ×ĺ": 31969, - "Ġà¤ķ": 31970, - "Ġблаг": 31971, - "Ġscalp": 31972, - "ĠPause": 31973, - "Ġcaption": 31974, - "Ġendanger": 31975, - "Ġenlar": 31976, - "Ġrotten": 31977, - "ãĥĥãĥĪ": 31978, - "Ġwah": 31979, - "èĤī": 31980, - "Ġdzi": 31981, - "ĠInstall": 31982, - "Ay": 31983, - "Ġcrear": 31984, - "енÑĤа": 31985, - "Ġweighing": 31986, - "Ġbutterflies": 31987, - "ĠGast": 31988, - "äºķ": 31989, - "horn": 31990, - "warz": 31991, - "ICEOVER": 31992, - "ĠнайÑĤи": 31993, - "Ġcoefficients": 31994, - "ç°¡åĸ®": 31995, - "ĠSpencer": 31996, - "ĠHigher": 31997, - "Ġcowork": 31998, - "å¨ĺ": 31999, - "ĠкоÑĤоÑĢое": 32000, - "Ġmonit": 32001, - "Ġdysfunction": 32002, - "ĠÑģÑĤанов": 32003, - "Ġtournaments": 32004, - "Ġoyster": 32005, - "BN": 32006, - "Ġtrud": 32007, - "slow": 32008, - "ĠPenny": 32009, - "ĠOdys": 32010, - "ær": 32011, - "Ġfou": 32012, - "Ġenjoyment": 32013, - "аÑĤÑĭ": 32014, - "ĠwyglÄħda": 32015, - "алÑĮнаÑı": 32016, - "ĠProtect": 32017, - "Ġmoy": 32018, - "Ġclaw": 32019, - "Ġsuspicion": 32020, - "Ġsacrificed": 32021, - "Ġgosto": 32022, - "Big": 32023, - "Ġaggressively": 32024, - "Ġvorne": 32025, - "ãĥł": 32026, - "Ġblamed": 32027, - "ĠSehr": 32028, - "פר": 32029, - "cito": 32030, - "Ġseals": 32031, - "Ġmujer": 32032, - "ĠWeird": 32033, - "Ġforens": 32034, - "Ġcontributes": 32035, - "estra": 32036, - "Ġpog": 32037, - "LOL": 32038, - "Ġhacerlo": 32039, - "оÑĤÑĮ": 32040, - "fiction": 32041, - "79": 32042, - "λο": 32043, - "大æ¦Ĥ": 32044, - "声": 32045, - "ĠÑĤоб": 32046, - "ĠGS": 32047, - "ĠClara": 32048, - "itez": 32049, - "Ġadvocating": 32050, - "ĠíĶĦë": 32051, - "sung": 32052, - "Ġvertices": 32053, - "Ġnavigating": 32054, - "Ġeuropé": 32055, - "çļĨ": 32056, - "Ġslowed": 32057, - "Ġforeground": 32058, - "ĠIndustrial": 32059, - "Ġadore": 32060, - "ìĭŃ": 32061, - "Ġcréer": 32062, - "æŀĹ": 32063, - "chnitt": 32064, - "Ġunaware": 32065, - "Ġcurly": 32066, - "entar": 32067, - "Ġler": 32068, - "Ġprohibited": 32069, - "ĠHeroes": 32070, - "ĠReed": 32071, - "uca": 32072, - "Ġsmok": 32073, - "Ġkunna": 32074, - "zeitig": 32075, - "immen": 32076, - "ĠLun": 32077, - "ĠабÑģолÑİÑĤ": 32078, - "Ġdegli": 32079, - "Ġvillagers": 32080, - "Ġpreset": 32081, - "zept": 32082, - "uds": 32083, - "Ġemit": 32084, - "ä½łè¦ģ": 32085, - "Ġëī": 32086, - "ëĬĶì§Ģ": 32087, - "нако": 32088, - "Ġosób": 32089, - "Ġ1969": 32090, - "ĠÐIJÑĢ": 32091, - "Ġmanchmal": 32092, - "ĠBrock": 32093, - "Ġmantra": 32094, - "ĠWIL": 32095, - "bach": 32096, - "inä": 32097, - "elas": 32098, - "keln": 32099, - "Ġdisciple": 32100, - "Ġqualc": 32101, - "Ġdehyd": 32102, - "ìĿ´ëĿ¼ëĬĶ": 32103, - "Af": 32104, - "ìĦ±ìĿ´": 32105, - "Ryan": 32106, - "Ġpuppet": 32107, - "ĠдÑĢÑĥгие": 32108, - "Ġrud": 32109, - "Ġpending": 32110, - 
"Plus": 32111, - "ĠìķĬìĿĦ": 32112, - "Ġbá»ĭ": 32113, - "ĠSega": 32114, - "çe": 32115, - "Ġprogrammer": 32116, - "bli": 32117, - "Ġunl": 32118, - "Ġenslaved": 32119, - "Ġsociété": 32120, - "Äģh": 32121, - "Ġinheritance": 32122, - "ĠBangl": 32123, - "ermaid": 32124, - "Ġpractitioner": 32125, - "ĠStalin": 32126, - "ĠUser": 32127, - "cible": 32128, - "Ġcardiac": 32129, - "ĠKoreans": 32130, - "Ġdumped": 32131, - "Ġ×Ķ×Ļ×Ķ": 32132, - "áis": 32133, - "Ġhydraulic": 32134, - "oubtedly": 32135, - "ĠPit": 32136, - "Ġpicnic": 32137, - "Ġbehöver": 32138, - "ĠÑģмог": 32139, - "Ġbraking": 32140, - "é»ij": 32141, - "utar": 32142, - "ĠìĦ¸ë": 32143, - "ubl": 32144, - "Ġüz": 32145, - "Ġmajesty": 32146, - "Ġbers": 32147, - "utable": 32148, - "Ġhotter": 32149, - "çħ§": 32150, - "ÛĮÙĨ": 32151, - "Ġbiases": 32152, - "Ġsubjected": 32153, - "Ġnaughty": 32154, - "Ġcircus": 32155, - "ãģĹãģĭ": 32156, - "ĠImmedi": 32157, - "ĠStefan": 32158, - "ĠTriple": 32159, - "enk": 32160, - "Ġwit": 32161, - "Ġrecycle": 32162, - "emie": 32163, - "dated": 32164, - "Ġunload": 32165, - "Ġpopula": 32166, - "chin": 32167, - "Ġyields": 32168, - "Ġenglish": 32169, - "ĠBonnie": 32170, - "Ġspiders": 32171, - "Ãģ": 32172, - "Ġerosion": 32173, - "éĥ¨åĪĨ": 32174, - "ĠNICK": 32175, - "иÑıÑħ": 32176, - "Ġimpart": 32177, - "Ġкни": 32178, - "Ġresolutions": 32179, - "Ġlithium": 32180, - "Ġconvergence": 32181, - "ĠTara": 32182, - "Ġдве": 32183, - "ths": 32184, - "ĠCindy": 32185, - "æĪijè¦ģ": 32186, - "幫": 32187, - "ĠDIE": 32188, - "Ġassurance": 32189, - "ĠопиÑģ": 32190, - "Ġbuckets": 32191, - "Ġcues": 32192, - "ĠQuiet": 32193, - "Ġsimilarity": 32194, - "Ġfoundational": 32195, - "ĠMinist": 32196, - "滿": 32197, - "Ġpian": 32198, - "Ġcentr": 32199, - "Ġnumb": 32200, - "Ġmonks": 32201, - "ujourd": 32202, - "enzie": 32203, - "Ġskateboard": 32204, - "Ġdlatego": 32205, - "ĠÑģоÑĤ": 32206, - "ĠAE": 32207, - "Ġmasterpiece": 32208, - "ĠSolomon": 32209, - "ĠReddit": 32210, - "Ġriot": 32211, - "abl": 32212, - "ĠJazz": 32213, - "Ġelectromagnetic": 32214, - "Ġinsecure": 32215, - "ĠCompet": 32216, - "geries": 32217, - "обод": 32218, - "ł×ķ": 32219, - "ðŁĴ": 32220, - "Ġsenators": 32221, - "ĠBrisbane": 32222, - "ĠAlb": 32223, - "uttering": 32224, - "ĠAllow": 32225, - "zero": 32226, - "Ġpai": 32227, - "ĠÐIJлекÑģ": 32228, - "ĠDisplay": 32229, - "ĠBlade": 32230, - "ĠApps": 32231, - "Ġpä": 32232, - "ĠдеÑģÑı": 32233, - "Ġquella": 32234, - "ĠGao": 32235, - "еннÑĭÑħ": 32236, - "Ġspoilers": 32237, - "Ġgallons": 32238, - "ĠÙĦÙĬ": 32239, - "ĠZion": 32240, - "æľīä¸Ģ": 32241, - "onie": 32242, - "ragt": 32243, - "ĠChand": 32244, - "Ġë³ij": 32245, - "Ġblunt": 32246, - "Ġusu": 32247, - "ĠKad": 32248, - "rakt": 32249, - "Ġcinematic": 32250, - "Ġammunition": 32251, - "rene": 32252, - "Ġfourteen": 32253, - "ĠCarn": 32254, - "crit": 32255, - "Ġtenure": 32256, - "vu": 32257, - "Ġprincipalmente": 32258, - "Ġalleen": 32259, - "éĢĻä¸Ģ": 32260, - "Ġkomplett": 32261, - "Ġdüny": 32262, - "James": 32263, - "Ġreceptor": 32264, - "Ġoneself": 32265, - "guru": 32266, - "Ġmerchant": 32267, - "liness": 32268, - "Ġoverlooked": 32269, - "Ġharmonic": 32270, - "éķ¿": 32271, - "ieso": 32272, - "×ķ×ŀ": 32273, - "colm": 32274, - "ĠпÑĢоекÑĤ": 32275, - "ĠAda": 32276, - "اس": 32277, - "Tim": 32278, - "Ġrecurring": 32279, - "Ġproceeds": 32280, - "ĠParticularly": 32281, - "ĠDownload": 32282, - "etrical": 32283, - "Ġmatrices": 32284, - "Ġproyecto": 32285, - "ancies": 32286, - "ĠUhm": 32287, - "Ġcaves": 32288, - "Ġìĸ´ëł¤": 32289, - "ĠLeaf": 32290, - "ĠобÑĭÑĩ": 32291, - "ĠìĿ´ìľł": 32292, - "Europe": 32293, - 
"ĠtÄħ": 32294, - "Ġpuls": 32295, - "Ġtakiego": 32296, - "ÐĿе": 32297, - "GU": 32298, - "Ġfors": 32299, - "Ïģγ": 32300, - "Ġfotos": 32301, - "Ġ))": 32302, - "Ġ멤ë": 32303, - "Ġaquilo": 32304, - "ĠKurd": 32305, - "ï¸ı": 32306, - "ptic": 32307, - "ĠDort": 32308, - "Ġmisery": 32309, - "auso": 32310, - "åĬŁ": 32311, - "chuckling": 32312, - "ĠRidge": 32313, - "ĠíĸĪìĬµëĭĪëĭ¤": 32314, - "Ġ***": 32315, - "客": 32316, - "ĠHmmm": 32317, - "Ġgeographic": 32318, - "Ġanys": 32319, - "Ġtalvez": 32320, - "Ġskelet": 32321, - "Ġsignatures": 32322, - "Ġliters": 32323, - "IJë©´": 32324, - "ĠÑģвоего": 32325, - "Ġskiing": 32326, - "ĠÐľÐ¾Ñģ": 32327, - "Ġadopting": 32328, - "Ġhaft": 32329, - "Ġsymmetric": 32330, - "ĠLiqu": 32331, - "Ġthyroid": 32332, - "Ġmisin": 32333, - "lude": 32334, - "Ġhull": 32335, - "ĠXD": 32336, - "ĠGust": 32337, - "zeich": 32338, - "Ġvibrations": 32339, - "Ġesemp": 32340, - "ĠвÑģÑİ": 32341, - "ĠQuem": 32342, - "Ġübrig": 32343, - "ĠSke": 32344, - "ĠLynch": 32345, - "rooms": 32346, - "artet": 32347, - "fest": 32348, - "Ġfrüher": 32349, - "Ġlure": 32350, - "ä¸į好æĦıæĢĿ": 32351, - "ĠìķĮìķĦ": 32352, - "ĠWIN": 32353, - "ĠRYAN": 32354, - "ĠкоÑĤоÑĢÑĥÑİ": 32355, - "ĠKash": 32356, - "Ġ×Ķ×ŀ": 32357, - "Ġsafeg": 32358, - "ĠHallelujah": 32359, - "ĠдвÑĥÑħ": 32360, - "Ġstaple": 32361, - "Ġsediment": 32362, - "ĠActs": 32363, - "Ġblaming": 32364, - "Ġmainland": 32365, - "Ġsporting": 32366, - "Ġdecorations": 32367, - "Ġexecuting": 32368, - "Ġparan": 32369, - "ĠDollar": 32370, - "Ġprojections": 32371, - "Ġcommissioned": 32372, - "Ġbour": 32373, - "öm": 32374, - "Ġsteamed": 32375, - "ĠëŃĺ": 32376, - "Ġpetrol": 32377, - "Ġcelular": 32378, - "帶": 32379, - "ĠHungary": 32380, - "Ġrented": 32381, - "ĠваÑĢи": 32382, - "bbie": 32383, - "Ġsécur": 32384, - "üll": 32385, - "Ġswings": 32386, - "between": 32387, - "ĠиÑĤ": 32388, - "estro": 32389, - "Ġniemand": 32390, - "ĠìĤ¼": 32391, - "ĠPardon": 32392, - "esses": 32393, - "ĠMID": 32394, - "Ġcentralized": 32395, - "ĠAlien": 32396, - "culos": 32397, - "Ġcrise": 32398, - "裡éĿ¢": 32399, - "Ġclasse": 32400, - "beitet": 32401, - "iÄŁi": 32402, - "Ġwhales": 32403, - "Ġperimeter": 32404, - "Ġtying": 32405, - "Ġstrony": 32406, - "Ġlikewise": 32407, - "ĠPunch": 32408, - "Da": 32409, - "ĠBaptist": 32410, - "Ġsorting": 32411, - "Ġiv": 32412, - "Ġíķ©": 32413, - "Ġrehab": 32414, - "Ġeta": 32415, - "river": 32416, - "Ġsai": 32417, - "ãģĦãģŁãģł": 32418, - "odus": 32419, - "ãģĬé¡ĺãģĦãģĹãģ¾ãģĻ": 32420, - "Ġessayer": 32421, - "Ġturtles": 32422, - "ĠHazrat": 32423, - "Ġfabrics": 32424, - "Ġcavity": 32425, - "Ġponieważ": 32426, - "Ġschlecht": 32427, - "Ġsalsa": 32428, - "ÅŁekkür": 32429, - "Ġseating": 32430, - "Ġeconomists": 32431, - "Ġmang": 32432, - "Ġseguinte": 32433, - "Ġrang": 32434, - "Ġratios": 32435, - "Ġconstell": 32436, - "Ġlongtemps": 32437, - "uating": 32438, - "Ġspoiled": 32439, - "Ġrecipients": 32440, - "Ġsniper": 32441, - "ä¹ĭåīį": 32442, - "ìĬµëĭĪê¹Į": 32443, - "Ġwp": 32444, - "ĠLINKE": 32445, - "Ġflare": 32446, - "ĠAdri": 32447, - "ñas": 32448, - "Ġbackl": 32449, - "mÃ¤ÃŁ": 32450, - "ĠBend": 32451, - "Ġworkloads": 32452, - "ĠÑģÑĥп": 32453, - "Ġ1975": 32454, - "имÑģÑı": 32455, - "ане": 32456, - "Ġмон": 32457, - "Ġaspirations": 32458, - "ĠAer": 32459, - "ĠговоÑĢиÑĤÑĮ": 32460, - "ĠQian": 32461, - "å¦Ī": 32462, - "Ġcompromised": 32463, - "Ġyolk": 32464, - "лаÑģÑĤ": 32465, - "Ġhemen": 32466, - "rove": 32467, - "dens": 32468, - "ĠкомменÑĤ": 32469, - "Ġ---": 32470, - "Ġfluores": 32471, - "ноÑģ": 32472, - "ĠLiverpool": 32473, - "ĠÑģобой": 32474, - "ĠZwe": 32475, - "Ġlumin": 32476, - 
"ĠOG": 32477, - "á¸": 32478, - "holm": 32479, - "profits": 32480, - "SN": 32481, - "Ġproportions": 32482, - "Ġmica": 32483, - "ĠBoh": 32484, - "ĠAtlas": 32485, - "Ġunsure": 32486, - "Ġtouring": 32487, - "Ġnied": 32488, - "ĠtÄĻ": 32489, - "Ġimperative": 32490, - "Ġdemek": 32491, - "ĠSheriff": 32492, - "rance": 32493, - "Ġhomeland": 32494, - "ĠHail": 32495, - "ĠGanz": 32496, - "ymm": 32497, - "Mon": 32498, - "åĨ·": 32499, - "vida": 32500, - "Ġdesarroll": 32501, - "æĬĢ": 32502, - "Ġintriguing": 32503, - "ĠHugo": 32504, - "ĠãĤĤ": 32505, - "é¬": 32506, - "аÑĨ": 32507, - "ĠWiÄĻc": 32508, - "atted": 32509, - "ĠìķĦëĭĪê³ł": 32510, - "ĠVari": 32511, - "ád": 32512, - "Ġsurreal": 32513, - "Ġdisparities": 32514, - "Ġmó": 32515, - "ullen": 32516, - "ĠìŀĪëĭ¤ê³ł": 32517, - "ĠпожалÑĥйÑģÑĤа": 32518, - "Ġmains": 32519, - "Ġeject": 32520, - "Ġmethane": 32521, - "Ġmarginalized": 32522, - "Ġchilli": 32523, - "rès": 32524, - "Ġyem": 32525, - "ä½łæĺ¯": 32526, - "ĠChun": 32527, - "Ġdebts": 32528, - "Ġdownloading": 32529, - "ĠAthens": 32530, - "isierung": 32531, - "ryn": 32532, - "Ġtekn": 32533, - "ĠQuindi": 32534, - "éľĢ": 32535, - "Ġtaraf": 32536, - "Ġhé": 32537, - "Ġconsciously": 32538, - "Ġfixes": 32539, - "uckle": 32540, - "mayın": 32541, - "Ġfrei": 32542, - "Ġspa": 32543, - "Ġì§Ħíĸī": 32544, - "ĠاÙĦØ°": 32545, - "ĠÑĥк": 32546, - "lett": 32547, - "ĠolmuÅŁ": 32548, - "Ġcheesy": 32549, - "าà¸ģ": 32550, - "naire": 32551, - "Ġwiden": 32552, - "Ġlien": 32553, - "Ġescaping": 32554, - "iggs": 32555, - "ĠBlick": 32556, - "cÄħ": 32557, - "ĠìĦľë": 32558, - "Ġ×Ķס": 32559, - "ĠвпеÑĢ": 32560, - "ophone": 32561, - "iell": 32562, - "ĠSUBSCRI": 32563, - "Ġlions": 32564, - "Ġê·¸ê²ĥ": 32565, - "Ġinspires": 32566, - "Ġguarantees": 32567, - "Ġcomeça": 32568, - "ĠGrowing": 32569, - "Ġneglig": 32570, - "ĠFrankf": 32571, - "Ġgegeben": 32572, - "ĠÄijầu": 32573, - "Ġendlich": 32574, - "Ġìį¨": 32575, - "ĠTT": 32576, - "ĠLith": 32577, - "ÏĢα": 32578, - "astern": 32579, - "ĠAzer": 32580, - "Ġlunar": 32581, - "hic": 32582, - "ĠнаÑĢод": 32583, - "Ġnenhum": 32584, - "è·ij": 32585, - "ĠSalvador": 32586, - "ĠProgress": 32587, - "Ġprivileges": 32588, - "ĠëıĻìķĪ": 32589, - "Ġantagon": 32590, - "ĠImpf": 32591, - "Ġdescub": 32592, - "ĠLei": 32593, - "ĠìĥĪë¡ľ": 32594, - "Ñĩе": 32595, - "Ġdólares": 32596, - "ĠMeghan": 32597, - "ĠWire": 32598, - "too": 32599, - "aying": 32600, - "usc": 32601, - "Ġtud": 32602, - "Ġappeals": 32603, - "educ": 32604, - "Ġpane": 32605, - "Ġji": 32606, - "Ġdecks": 32607, - "ĠAlter": 32608, - "Ġå°±": 32609, - "ìĦ¤": 32610, - "åĪĨéIJĺ": 32611, - "Ġproductions": 32612, - "ĠWILLIAM": 32613, - "Ġimplied": 32614, - "Ġfulfillment": 32615, - "ĠAah": 32616, - "Ġsaja": 32617, - "xus": 32618, - "ĠÎļαι": 32619, - "Ãłs": 32620, - "ucch": 32621, - "око": 32622, - "ĠDiscord": 32623, - "ĠSY": 32624, - "jsk": 32625, - "ĠWallace": 32626, - "unction": 32627, - "Daniel": 32628, - "Ġköt": 32629, - "ijah": 32630, - "Ġmarche": 32631, - "Ġdisgr": 32632, - "Ġmungkin": 32633, - "Ġalma": 32634, - "³µ": 32635, - "Ġextensively": 32636, - "ĠFloren": 32637, - "ĠAllison": 32638, - "ãĤ±": 32639, - "ÙĬÙħ": 32640, - "Ġjuven": 32641, - "ĠRenaissance": 32642, - "Ġfundraising": 32643, - "ĠChaos": 32644, - "Ġparaly": 32645, - "Ġnarrator": 32646, - "Ġecosystems": 32647, - "Ash": 32648, - "Ġmitigation": 32649, - "ĠAujourd": 32650, - "ĠIdee": 32651, - "!,": 32652, - "Ġ½": 32653, - "Ġlandlord": 32654, - "Ġdefects": 32655, - "Ġacre": 32656, - "ulsive": 32657, - "Ġalgae": 32658, - "pek": 32659, - "Ġemba": 32660, - "ĠRoc": 32661, - "éĽ¢": 32662, - "ksom": 32663, - 
"äche": 32664, - "Ġleuk": 32665, - "Ġleveraging": 32666, - "Ġê·¸ëłĩì§Ģ": 32667, - "ĠPalm": 32668, - "Ġäven": 32669, - "Ġlis": 32670, - "ĠInsp": 32671, - "ĠRita": 32672, - "ĠAbb": 32673, - "ithm": 32674, - "Ġsupervision": 32675, - "Ġrevisit": 32676, - "ĠpiÄĻ": 32677, - "Ġeuh": 32678, - "Ġfades": 32679, - "Ġmotto": 32680, - "åį¡": 32681, - "езж": 32682, - "ĠShim": 32683, - "Ġrelevance": 32684, - "Ġoo": 32685, - "Ġostat": 32686, - "nica": 32687, - "Ġchoix": 32688, - "ĠFaculty": 32689, - "Ġì¤ijìĹIJ": 32690, - "ĠAbove": 32691, - "ĠнеболÑĮÑĪ": 32692, - "Ġsequencing": 32693, - "Ġnutrient": 32694, - "Ġconquered": 32695, - "Ġdigestive": 32696, - "Ġbackdrop": 32697, - "ĠLori": 32698, - "ailable": 32699, - "Game": 32700, - "Ġneglected": 32701, - "omorph": 32702, - "illah": 32703, - "Ġkne": 32704, - "Ġsiitä": 32705, - "Ġworkspace": 32706, - "ĠVenice": 32707, - "ĠKne": 32708, - "Ñīо": 32709, - "ħĢ": 32710, - "ĠHass": 32711, - "Ġvita": 32712, - "Ŀ¼ë©´": 32713, - "Ġlays": 32714, - "ências": 32715, - "érica": 32716, - "ĠLl": 32717, - "æ±Ĥ": 32718, - "ĠCoca": 32719, - "ĠWHY": 32720, - "èĪŀ": 32721, - "Ġrouting": 32722, - "Ġpermissions": 32723, - "Ġdings": 32724, - "prend": 32725, - "program": 32726, - "Ġcrocod": 32727, - "bral": 32728, - "AAAAAAAA": 32729, - "agit": 32730, - "ĠNä": 32731, - "Ġgekommen": 32732, - "atten": 32733, - "Ġreferenced": 32734, - "Ġpairing": 32735, - "ĠPartner": 32736, - "ĠCoronavirus": 32737, - "ÑĸÑģ": 32738, - "è½ī": 32739, - "Ġ×Ķ×ĵ": 32740, - "ĠespecÃŃfic": 32741, - "arsi": 32742, - "quelle": 32743, - "Ġspontaneous": 32744, - "çĨ±": 32745, - "Ġê²ĥìĿĦ": 32746, - "ĠÐŁÐ¾Ñģле": 32747, - "ĠاÙĦد": 32748, - "ĠShout": 32749, - "Ġнал": 32750, - "Ġdisguise": 32751, - "ĠJord": 32752, - "Ġwee": 32753, - "Ġmiejsc": 32754, - "Ġserum": 32755, - "Ġplaisir": 32756, - "Ġcredible": 32757, - "ĠbÃ¥": 32758, - "ĠAJ": 32759, - "mares": 32760, - "Ġrods": 32761, - "Ġeran": 32762, - "ãģ¾ãģĤ": 32763, - "Ġpää": 32764, - "ĠUA": 32765, - "ĠUnknown": 32766, - "ĠÙĦÙħ": 32767, - "ĠRabbi": 32768, - "Ġlaat": 32769, - "Ġhairstyle": 32770, - "Ġغ": 32771, - "éģĭ": 32772, - "Ġcach": 32773, - "ĠWriting": 32774, - "оÑĩки": 32775, - "abad": 32776, - "Ġstraighten": 32777, - "--\"": 32778, - "wife": 32779, - "Ġhottest": 32780, - "Ġpunya": 32781, - "ĠFashion": 32782, - "griff": 32783, - "ĠQR": 32784, - "otch": 32785, - "ĠÐľÐ¾Ð¶ÐµÑĤ": 32786, - "Cloud": 32787, - "ĠStrike": 32788, - "ĠHein": 32789, - "Ġ羣çļĦ": 32790, - "Ġlei": 32791, - "ĠFlow": 32792, - "wegs": 32793, - "Ġhabr": 32794, - "åīĽåīĽ": 32795, - "nahme": 32796, - "Ìģ": 32797, - "Ġpleasing": 32798, - "opping": 32799, - "Ġ구ëıħ": 32800, - "Ġdran": 32801, - "Ġbangs": 32802, - "Ġ79": 32803, - "Ġsket": 32804, - "Ġcaval": 32805, - "ĠMacron": 32806, - "Ġweighted": 32807, - "Ġmuted": 32808, - "Ġnuestras": 32809, - "EEP": 32810, - "Ġmathematic": 32811, - "ĠMRI": 32812, - "agus": 32813, - "Ġtherapies": 32814, - "θε": 32815, - "Ġunpl": 32816, - "Ġcommencer": 32817, - "full": 32818, - "Ġtowels": 32819, - "Ġprue": 32820, - "Ġlicenses": 32821, - "׼×ķ׾": 32822, - "ĠÐŁÐ¾ÑĩемÑĥ": 32823, - "Ġpointless": 32824, - "Bye": 32825, - "Ġeligibility": 32826, - "Ġscrape": 32827, - "Ġabusive": 32828, - "ĠMant": 32829, - "Ġjeunes": 32830, - "tal": 32831, - "ĠPrincip": 32832, - "ĠOrthodox": 32833, - "Ġmelod": 32834, - "ĠмаÑĤеÑĢи": 32835, - "Ġprosecutor": 32836, - "Ġopioid": 32837, - "ĠÑĥвеÑĢ": 32838, - "ĠBeen": 32839, - "Ġìłijì¢ħ": 32840, - "Ġdynasty": 32841, - "Ġajuda": 32842, - "Ġentreg": 32843, - "Ġweighed": 32844, - "Ġeure": 32845, - "ĠBem": 32846, - "Ġabnormal": 32847, - "82": 32848, - "ĠJR": 
32849, - "ĠAkt": 32850, - "ĠBri": 32851, - "út": 32852, - "Ġstagn": 32853, - "!*": 32854, - "Ġwegen": 32855, - "Ġleaking": 32856, - "ĠWords": 32857, - "ĠMau": 32858, - "Ġvue": 32859, - "ĠLiam": 32860, - "анием": 32861, - "Ġclinicians": 32862, - "ĠPump": 32863, - "Ġförst": 32864, - "?...": 32865, - "Ġautomotive": 32866, - "ĠOwen": 32867, - "zusagen": 32868, - "ĠHundred": 32869, - "Ġdecentralized": 32870, - "Ġbulbs": 32871, - "Ġ׾׼": 32872, - "Ġprovinces": 32873, - "ĠMilan": 32874, - "81": 32875, - "kas": 32876, - "Ġëĵ£": 32877, - "Ġforça": 32878, - "Ġrightly": 32879, - "島": 32880, - "rÄħ": 32881, - "Ġvenues": 32882, - "Ġwai": 32883, - "Ġpredicting": 32884, - "ĠWiFi": 32885, - "Ġê¶ģê¸Ī": 32886, - "رÙĪ": 32887, - "Ġ×Ķ×ĸ": 32888, - "century": 32889, - "Ġgradual": 32890, - "ĠProbleme": 32891, - "ĠìĹħ": 32892, - "Ġcoping": 32893, - "ĠBrus": 32894, - "Ġpeanuts": 32895, - "irtschaft": 32896, - "Ġзал": 32897, - "ĠTroy": 32898, - "Ġsperm": 32899, - "ĠMitar": 32900, - "ĠTürkiye": 32901, - "grand": 32902, - "¦Ń": 32903, - "Ġ×ŀס": 32904, - "Ġpans": 32905, - "ĠKnowledge": 32906, - "berly": 32907, - "ĠÐķго": 32908, - "Ġdanced": 32909, - "ĠFrost": 32910, - "ĠBurg": 32911, - "Ġbiting": 32912, - "ìłķìĿĦ": 32913, - "meal": 32914, - "Ġheroic": 32915, - "Ġmotherboard": 32916, - "ĠLicht": 32917, - "ãģ£ãģ": 32918, - "llan": 32919, - "айн": 32920, - "ĠÑĢÑıд": 32921, - "Ġà¹Ģà¸": 32922, - "onen": 32923, - "irie": 32924, - "Art": 32925, - "rang": 32926, - "νη": 32927, - "Ġnewborn": 32928, - "Ġamis": 32929, - "ĠاÙĪر": 32930, - "Ġsophom": 32931, - "ĠCareful": 32932, - "Ġprospects": 32933, - "ensen": 32934, - "Ġthrill": 32935, - "ĠViá»ĩt": 32936, - "Adam": 32937, - "rition": 32938, - "entric": 32939, - "uden": 32940, - "Ġcertificates": 32941, - "Ġashes": 32942, - "調": 32943, - "playing": 32944, - "Ġsadece": 32945, - "Ġost": 32946, - "Ġairplanes": 32947, - "ÑĢок": 32948, - "oner": 32949, - "Ġmagnesium": 32950, - "Ġgoddamn": 32951, - "Ġ1972": 32952, - "ĠSchule": 32953, - "Ġtemat": 32954, - "Ġpartout": 32955, - "à¯Ĥ": 32956, - "Ġinve": 32957, - "ĠScientists": 32958, - "ĠHudson": 32959, - "winning": 32960, - "ceksin": 32961, - "Ġcongressional": 32962, - "oru": 32963, - "Ġropes": 32964, - "вед": 32965, - "Ġmadre": 32966, - "Ġferry": 32967, - "ĠCohen": 32968, - "ĠPred": 32969, - "Ġvagy": 32970, - "ĠбеÑģп": 32971, - "Ġmultim": 32972, - "Ġdrainage": 32973, - "Ġsimulator": 32974, - "giggles": 32975, - "ĠStadium": 32976, - "обÑī": 32977, - "Ġnotices": 32978, - "Ġcrawling": 32979, - "Ġgroupe": 32980, - "åı¸": 32981, - "ĠktoÅĽ": 32982, - "ĠYoga": 32983, - "Ġmedida": 32984, - "ĠÑħваÑĤ": 32985, - "ĠLite": 32986, - "Ġrav": 32987, - "orama": 32988, - "Ġdiscord": 32989, - "ĠDIRE": 32990, - "Ġteh": 32991, - "ĠNurs": 32992, - "ç²ī": 32993, - "Ġpitched": 32994, - "Ġbarking": 32995, - "ĠCoke": 32996, - "wiad": 32997, - "Ġpopulated": 32998, - "éĻ¤": 32999, - "pelled": 33000, - "Ġбог": 33001, - "Ġpewno": 33002, - "ĠCube": 33003, - "Ġrecruited": 33004, - "éĢĻ種": 33005, - "ĠCara": 33006, - "ıģını": 33007, - "imated": 33008, - "ĠÑĪкол": 33009, - "icional": 33010, - "ĠпÑĢоÑĦ": 33011, - "Ġcontamination": 33012, - "Ġúltimos": 33013, - "Ġfearful": 33014, - "Ġelephants": 33015, - "usi": 33016, - "ĠiTunes": 33017, - "ĠSwami": 33018, - "ê¼": 33019, - "ĠìĦ¤ëªħ": 33020, - "ĠRichards": 33021, - "Ġmagnets": 33022, - "ĠRichtung": 33023, - "ĠLegion": 33024, - "èıľ": 33025, - "Ġkitty": 33026, - "Ġkissed": 33027, - "Ġwatering": 33028, - "Ġcono": 33029, - "ĠPalestine": 33030, - "idir": 33031, - "Ġmaze": 33032, - "Ġfluids": 33033, - "ĠProducer": 33034, - 
"ĠKrsna": 33035, - "好åķ¦": 33036, - "laf": 33037, - "Ġ×IJ×ķ": 33038, - "Ġmiesz": 33039, - "ĠXing": 33040, - "ointed": 33041, - "sein": 33042, - "ĠFuk": 33043, - "ĠDepression": 33044, - "ĠDuty": 33045, - "ĠPanther": 33046, - "Ġsund": 33047, - "Ġrefere": 33048, - "Ġexclusion": 33049, - "Ġnaval": 33050, - "ĠWinston": 33051, - "Ġslogan": 33052, - "Ġhypothetical": 33053, - "Ġelevate": 33054, - "ëł¹": 33055, - "Ġcabeça": 33056, - "ĠGesund": 33057, - "meter": 33058, - "ĠìķĦëĭĪë©´": 33059, - "Ġcloudy": 33060, - "âĢ¦?": 33061, - "ĠSchritt": 33062, - "ĠJS": 33063, - "ìį": 33064, - "ĠSprings": 33065, - "ĠBatter": 33066, - "·°": 33067, - "Ġtailor": 33068, - "ĠPTSD": 33069, - "ĠGent": 33070, - "ĠbaÄŁ": 33071, - "Ġspatula": 33072, - "Ġcray": 33073, - "ĠLegisl": 33074, - "Ġsú": 33075, - "Ġleve": 33076, - "าม": 33077, - "Ġerad": 33078, - "Ġdong": 33079, - "Ġderm": 33080, - "ĠBanks": 33081, - "icho": 33082, - "åħĪçĶŁ": 33083, - "ĠFranz": 33084, - "ravel": 33085, - "éģĶ": 33086, - "оло": 33087, - "Ġflute": 33088, - "ĠEk": 33089, - "Ġjoyful": 33090, - "Ġchased": 33091, - "ĠLarge": 33092, - "Over": 33093, - "Ġentrepreneurial": 33094, - "Ġconsiders": 33095, - "Ñĥем": 33096, - "opa": 33097, - "Ġdormir": 33098, - "ĠElementary": 33099, - "Ġprzypad": 33100, - "ÑĥÑģка": 33101, - "ĠоÑĩеÑĢ": 33102, - "ugene": 33103, - "Ġtenido": 33104, - "Ġlugares": 33105, - "ë¥": 33106, - "ĠÑĩаÑģÑĤ": 33107, - "Ġsao": 33108, - "Ġbraid": 33109, - "ĠVere": 33110, - "ĠReich": 33111, - "ĠPoss": 33112, - "Ġinan": 33113, - "wand": 33114, - "ref": 33115, - "Ġmontrer": 33116, - "Ġ1981": 33117, - "çķª": 33118, - "asında": 33119, - "Ġchrome": 33120, - "ĠTrinity": 33121, - "Ġexploitation": 33122, - "ĠSense": 33123, - "ĠCMS": 33124, - "ĠNoble": 33125, - "ĠìĦłíĥĿ": 33126, - "Ġswelling": 33127, - "electronic": 33128, - "]?": 33129, - "Ġbrushing": 33130, - "Ġliquidity": 33131, - "ĠHook": 33132, - "ĠConnor": 33133, - "ĠAlum": 33134, - "Ġgucken": 33135, - "suite": 33136, - "Ġwiele": 33137, - "Ġbarrels": 33138, - "ĠRegel": 33139, - "ĠMent": 33140, - "ĠTrip": 33141, - "ĠBrush": 33142, - "ĠErik": 33143, - "urate": 33144, - "ÉĻr": 33145, - "ĠCyr": 33146, - "ouble": 33147, - "ĠBecca": 33148, - "Ġpasswords": 33149, - "ű": 33150, - "borg": 33151, - "Ġvendo": 33152, - "ĠClaus": 33153, - "ĠFaz": 33154, - "indest": 33155, - "Ġdeceased": 33156, - "Ġcomparisons": 33157, - "ĠLCD": 33158, - "ĠPork": 33159, - "Ġeventual": 33160, - "Ġpatreon": 33161, - "Ġinability": 33162, - "Ġextinction": 33163, - "Ġì¢ĭìķĦíķĺëĬĶ": 33164, - "ĠÑģоÑģ": 33165, - "aju": 33166, - "Ġ×ij×IJ×": 33167, - "Ġsofort": 33168, - "Ġdestined": 33169, - "ĠRin": 33170, - "Ġmouths": 33171, - "ĠNatürlich": 33172, - "Ġpreserving": 33173, - "Ġlimp": 33174, - "黨": 33175, - "ocused": 33176, - "инг": 33177, - "Ġexposing": 33178, - "Ġξ": 33179, - "ëį": 33180, - "laugh": 33181, - "Ġhiss": 33182, - "ãģłãģĭãĤī": 33183, - "Ġindie": 33184, - "Ġdetal": 33185, - "ÑĢавÑģÑĤв": 33186, - "Ġtrên": 33187, - "æķ°": 33188, - "Ġogni": 33189, - "Ġsimplemente": 33190, - "Ġ1978": 33191, - "Ġgoo": 33192, - "Ġ1967": 33193, - "Ġgenug": 33194, - "hö": 33195, - "Ġhistó": 33196, - "å®Ł": 33197, - "Ġlobster": 33198, - "cendo": 33199, - "Ġteil": 33200, - "Ġallevi": 33201, - "0000": 33202, - "OLD": 33203, - "Ġpesos": 33204, - "Ġbonuses": 33205, - "Ġami": 33206, - "Ġrevival": 33207, - "ĠHorse": 33208, - "Ġsack": 33209, - "Talk": 33210, - "Ġmulher": 33211, - "ĠпоÑģÑĤоÑıн": 33212, - "ĠHood": 33213, - "Huh": 33214, - "Ġë¶ģ": 33215, - "Ġhyung": 33216, - "ĠMeeting": 33217, - "Ġimporta": 33218, - "Ġì°¾ìķĦ": 33219, - "ĠVern": 33220, - 
"Ġstripped": 33221, - "Ġrefuses": 33222, - "Ġqualifications": 33223, - "opl": 33224, - "ĢëıĦ": 33225, - "ixÃŃ": 33226, - "Ġdiab": 33227, - "itime": 33228, - "flows": 33229, - "Ġinac": 33230, - "ĠGong": 33231, - "Ġmeaningless": 33232, - "Ġcourageous": 33233, - "Ġmicrobi": 33234, - "azy": 33235, - "hist": 33236, - "Ġvolunteering": 33237, - "VIE": 33238, - "Ġviolated": 33239, - "Ġsympathy": 33240, - "ĠEdit": 33241, - "好åĥı": 33242, - "electric": 33243, - "product": 33244, - "Ġpandemia": 33245, - "Ġgeometric": 33246, - "ĠConvers": 33247, - "gre": 33248, - "Ġglut": 33249, - "isted": 33250, - "ĠاÙĦÙĥ": 33251, - "ĠChain": 33252, - "ĠPresent": 33253, - "ĠYin": 33254, - "ĠÑģог": 33255, - "ĠVlog": 33256, - "Ġìĸ´ë¨¸": 33257, - "Ġdonn": 33258, - "Ġhitch": 33259, - "ucking": 33260, - "ãģĬãģĦ": 33261, - "wald": 33262, - "risk": 33263, - "Ġhari": 33264, - "ĠKens": 33265, - "ĠIdol": 33266, - "Ġвнимание": 33267, - "Ġtodd": 33268, - "Ġsmashed": 33269, - "Ġinvari": 33270, - "ĠконÑĤÑĢ": 33271, - "Ġautistic": 33272, - "ìŀ¥ëĭĺ": 33273, - "Res": 33274, - "дÑĭ": 33275, - "chau": 33276, - "Ġselv": 33277, - "Ġhätten": 33278, - "ि": 33279, - "Ġexpects": 33280, - "Ïģη": 33281, - "Ġaçık": 33282, - "ĠHTTP": 33283, - "leÅŁ": 33284, - "Ġsweeping": 33285, - "ĠBeta": 33286, - "Ġcounterparts": 33287, - "abile": 33288, - "ĠSims": 33289, - "Cs": 33290, - "Ġrepar": 33291, - "squ": 33292, - "Ġprovincial": 33293, - "Ġshareholders": 33294, - "Ġrunter": 33295, - "Ġgedacht": 33296, - "ĠTeen": 33297, - "Ġgrands": 33298, - "çĶ¢": 33299, - "agles": 33300, - "Ġrocky": 33301, - "vens": 33302, - "Ġrivals": 33303, - "unal": 33304, - "Ġreacts": 33305, - "ë©": 33306, - "Ġmercury": 33307, - "ĠLuigi": 33308, - "Ġог": 33309, - "ĠJUST": 33310, - "Ġlod": 33311, - "Ġcortex": 33312, - "wig": 33313, - "Ġlakh": 33314, - "ì¤ijìĹIJ": 33315, - "ĠVic": 33316, - "ĠMund": 33317, - "Ġmapped": 33318, - "ĠDell": 33319, - "ĠDruck": 33320, - "Ġlifes": 33321, - "алÑĮное": 33322, - "ividual": 33323, - "adım": 33324, - "Ġatrav": 33325, - "ĠFlug": 33326, - "ĠKlein": 33327, - "ê±°ìķ¼": 33328, - "หà¸Ļ": 33329, - "Ġappli": 33330, - "ா?": 33331, - "üyorum": 33332, - "ĠинÑĤеÑĢеÑģно": 33333, - "Ġdisinfect": 33334, - ">-": 33335, - "Ġchampagne": 33336, - "Ġkla": 33337, - "opers": 33338, - "Trans": 33339, - "ĠDesert": 33340, - "Ġcultivate": 33341, - "ĠFucking": 33342, - "idelity": 33343, - "ĠÑĤан": 33344, - "Ġincub": 33345, - "Ġtemu": 33346, - "Ġlearner": 33347, - "founder": 33348, - "ĠSyl": 33349, - "ãĤĢ": 33350, - "Ġfato": 33351, - "zier": 33352, - "ĠìĹĨìĿ´": 33353, - "ĠìĪ¨": 33354, - "Ġpsycho": 33355, - "ĠÑĤелеÑĦ": 33356, - "Ġregarde": 33357, - "Ġrepresentations": 33358, - "Ġlitigation": 33359, - "Ġspann": 33360, - "ults": 33361, - "bior": 33362, - "è¦ĭãģ¦": 33363, - "ä¸įå¤ļ": 33364, - "ĠSurvey": 33365, - "ĠLEDs": 33366, - "Ġträ": 33367, - "Ġlên": 33368, - "Ġantioxid": 33369, - "еÑĢом": 33370, - "Ġinduction": 33371, - "Ġfooled": 33372, - "ätzlich": 33373, - "ĠговоÑĢÑıÑĤ": 33374, - "ĠFact": 33375, - "umbai": 33376, - "Ġwiggle": 33377, - "NOUN": 33378, - "Ġdévelopp": 33379, - "ĠClaro": 33380, - "Ġì¸": 33381, - "ë¬": 33382, - "ãģªãĤĵãģł": 33383, - "Ġaccumulate": 33384, - "Ġmaintains": 33385, - "ëĦ": 33386, - "ĠFighter": 33387, - "íĨł": 33388, - "Ġmatin": 33389, - "Ġcoupon": 33390, - "Ġstunt": 33391, - "Ġdebuted": 33392, - "å¾ħãģ£ãģ¦": 33393, - "Ġprag": 33394, - "иваем": 33395, - "73": 33396, - "Ġexpres": 33397, - "Ġìĺ¤ë¹ł": 33398, - "ĠпеÑĢÑģон": 33399, - "Ġcalculus": 33400, - "Ġabrupt": 33401, - "ĠInspector": 33402, - "ourt": 33403, - "æĸĻ": 33404, - "źniej": 33405, - 
"intense": 33406, - "Ba": 33407, - "Ġlounge": 33408, - "Ġasthma": 33409, - "ĠHiç": 33410, - "ª»": 33411, - "Ġeditorial": 33412, - "Ġseize": 33413, - "Ġkır": 33414, - "Ġmouve": 33415, - "Ġtierra": 33416, - "Ġtestosterone": 33417, - "Ġrh": 33418, - "ĠKingston": 33419, - "ELLE": 33420, - "ĠRepresentative": 33421, - "Ġ1974": 33422, - "Ġiba": 33423, - "Ts": 33424, - "Ġsorta": 33425, - "Ġ(?)": 33426, - "ĠتÙĪ": 33427, - "ĠëĤ´ëł¤": 33428, - "Ġbekommt": 33429, - "Ġspiritually": 33430, - "Ġdistorted": 33431, - "Mad": 33432, - "Ġreim": 33433, - "ánh": 33434, - "ĠOttoman": 33435, - "ĠRelig": 33436, - "ĠEls": 33437, - "Ġretained": 33438, - "ĠLaughs": 33439, - "æĢ»": 33440, - "ĠSAS": 33441, - "ĠколиÑĩеÑģÑĤво": 33442, - "×ķתר": 33443, - "Ġinnovate": 33444, - "Ġkork": 33445, - "ĠÑĢаÑģÑģказÑĭв": 33446, - "ondere": 33447, - "ivi": 33448, - "aye": 33449, - "ounty": 33450, - "ĠполÑĥÑĩаеÑĤÑģÑı": 33451, - "Ġbuns": 33452, - "åħ«": 33453, - "Ġyüzden": 33454, - "Ġsurgeries": 33455, - "Ø£ÙĨ": 33456, - "Ġbankruptcy": 33457, - "welt": 33458, - "Ġsiamo": 33459, - "Ġdarkest": 33460, - "ĠHann": 33461, - "gga": 33462, - "Ġformas": 33463, - "ĠDj": 33464, - "named": 33465, - "Ġshields": 33466, - "ueller": 33467, - "ĠFew": 33468, - "Ġlace": 33469, - "Ġfurious": 33470, - "ĠYU": 33471, - "Ġsocietal": 33472, - "Ġjudgement": 33473, - "ĠDos": 33474, - "Ġjab": 33475, - "laws": 33476, - "Ġreinvent": 33477, - "ĠKatherine": 33478, - "ĠChoi": 33479, - "adows": 33480, - "Ġrans": 33481, - "oden": 33482, - "ĠMidwest": 33483, - "nın": 33484, - "Ġdeport": 33485, - "ĠDip": 33486, - "ç´ħ": 33487, - "Ġatención": 33488, - "ĠCourtney": 33489, - "ividad": 33490, - "ĠÚ©Ûģ": 33491, - "Ġefficacy": 33492, - "ĠBrooks": 33493, - "Ġreferral": 33494, - "ĠконÑĨ": 33495, - "Ġmalicious": 33496, - "Ġkir": 33497, - "ĠGoddess": 33498, - "Ġfunky": 33499, - "Ġinterim": 33500, - "ĠKörper": 33501, - "Ġìĸ¼ë§": 33502, - "kur": 33503, - "Ġкли": 33504, - "Ġtrucs": 33505, - "gesetz": 33506, - "Ġzug": 33507, - "ĠGlück": 33508, - "ĠMinute": 33509, - "Ġprestigious": 33510, - "Ġniez": 33511, - "Ġconcentrations": 33512, - "лаÑģÑĤи": 33513, - "ĠSis": 33514, - "ĠVitamin": 33515, - "kov": 33516, - "ĠPBS": 33517, - "Ġнее": 33518, - "Ġretailers": 33519, - "Ġconventions": 33520, - "ĠSamantha": 33521, - "Ġproudly": 33522, - "Jordan": 33523, - "ĠJASON": 33524, - "atk": 33525, - "Ġtriste": 33526, - "Ġstär": 33527, - "Ġreiterate": 33528, - "Ġposterior": 33529, - "Ġ1973": 33530, - "ĠPine": 33531, - "ĠJuliet": 33532, - "Ġpedir": 33533, - "kil": 33534, - "Ġoverlapping": 33535, - "Ġexclude": 33536, - "Ġeconóm": 33537, - "Ġaccepts": 33538, - "ĠSter": 33539, - "決": 33540, - "Ġìļ´ëıĻ": 33541, - "estab": 33542, - "Ġtug": 33543, - "arg": 33544, - "Ġlivro": 33545, - "اص": 33546, - "Ġseams": 33547, - "Ġburaya": 33548, - "Ġello": 33549, - "ĠTM": 33550, - "ĠPaw": 33551, - "ĠIndex": 33552, - "Exc": 33553, - "Ġinspirational": 33554, - "Ġdunk": 33555, - "è°ģ": 33556, - "akter": 33557, - "Ġconditioner": 33558, - "ĠSalut": 33559, - "ÅĤec": 33560, - "Ġìī½": 33561, - "ĠÑĥзна": 33562, - "ĠRomeo": 33563, - "fruit": 33564, - "ĠYO": 33565, - "Ġchá»ī": 33566, - "бÑĥ": 33567, - "bons": 33568, - "Ġreproductive": 33569, - "Ġorada": 33570, - "Ġíļ¨": 33571, - "Ġtentar": 33572, - "Ġmañana": 33573, - "ãĤ¬": 33574, - "Ġsolvent": 33575, - "Jessica": 33576, - "ĠLegal": 33577, - "Ġtua": 33578, - "Ġsic": 33579, - "ĠEQ": 33580, - "aukee": 33581, - "ìĭľëĭ¤": 33582, - "ĠÅŀu": 33583, - "Ġadhere": 33584, - "ĠTul": 33585, - "Ġà®Ĩ": 33586, - "Ġtextbooks": 33587, - "ĠFifth": 33588, - "Ġexperi": 33589, - "Ġchic": 33590, - 
"Ġheap": 33591, - "inely": 33592, - "atra": 33593, - "Two": 33594, - "Ġhelemaal": 33595, - "Ġfren": 33596, - "æݨ": 33597, - "Ġbisher": 33598, - "اش": 33599, - "ĠìĦłìĥĿ": 33600, - "ĠTages": 33601, - "Ġsá»±": 33602, - "Ġbullied": 33603, - "ؤ": 33604, - "Ġbenefited": 33605, - "ĠPreviously": 33606, - "ĠÑįÑĦÑĦ": 33607, - "Ùį": 33608, - "Ġsenate": 33609, - "ĠMorm": 33610, - "ijke": 33611, - "ĠFlu": 33612, - "Ġincorporating": 33613, - "jack": 33614, - "ĠпиÑĤ": 33615, - "Ġimply": 33616, - "Ġhacks": 33617, - "ĠRICH": 33618, - "ĠкваÑĢ": 33619, - "ĠпÑĢекÑĢаÑģ": 33620, - "Ġdependency": 33621, - "Ġìļ©": 33622, - "Ġì±ħ": 33623, - "Ġwährend": 33624, - "Ġsulla": 33625, - "ĠPittsburgh": 33626, - "Ġesempio": 33627, - "¼ë¡ľ": 33628, - "prot": 33629, - "ĠRosen": 33630, - "ĠIndependence": 33631, - "Ġparsley": 33632, - "iegen": 33633, - "Ġhaw": 33634, - "Ġaquell": 33635, - "ĠCAP": 33636, - "ĠÑĢабоÑĤаÑĤÑĮ": 33637, - "ĠCliff": 33638, - "ionar": 33639, - "Ġsecuring": 33640, - "æĪijåĢijçļĦ": 33641, - "νε": 33642, - "Ġutilis": 33643, - "Ġcoule": 33644, - "ĠPing": 33645, - "Ġtrek": 33646, - "Ġfak": 33647, - "Ġenorme": 33648, - "Ġìĭ«": 33649, - "让": 33650, - "Ġdoubling": 33651, - "ĠнÑĢавиÑĤÑģÑı": 33652, - "Ġhed": 33653, - "hoven": 33654, - "ĠStanding": 33655, - "ĠmÃŃn": 33656, - "ĠJimin": 33657, - "Ġmonarch": 33658, - "Ġcoke": 33659, - "Ġmr": 33660, - "Ġclic": 33661, - "Ãį": 33662, - "Ġimpeachment": 33663, - "Ġdurability": 33664, - "Ġvarios": 33665, - "Ġcommercials": 33666, - "Ġgreetings": 33667, - "ĠRi": 33668, - "ĠAppreci": 33669, - "ìŀĪëĬĶ": 33670, - "Ġrésult": 33671, - "ért": 33672, - "Ġsalute": 33673, - "Ġpoderia": 33674, - "Ġsunrise": 33675, - "veck": 33676, - "Ġreluctant": 33677, - "Ġcommissioner": 33678, - "念": 33679, - "âte": 33680, - "ĠKenny": 33681, - "ĠSiri": 33682, - "ãĥĥãĥĹ": 33683, - "ĠëĬĺ": 33684, - "ĠEE": 33685, - "Ġunch": 33686, - "кон": 33687, - "ĠاÙĦØ¥": 33688, - "Ġbelts": 33689, - "Ġhass": 33690, - "ĠмоÑı": 33691, - "Ġdisplaced": 33692, - "Ġabra": 33693, - "ÎŃλ": 33694, - "Ġscratches": 33695, - "Ġcomet": 33696, - "Ġauthorization": 33697, - "ĠLLC": 33698, - "Ġproduk": 33699, - "Ġrehabilitation": 33700, - "åŀ": 33701, - "ÑĸÑĩ": 33702, - "uding": 33703, - "olit": 33704, - "Ġ105": 33705, - "Ġexpands": 33706, - "Ġaltri": 33707, - "ĠKomment": 33708, - "Ġanf": 33709, - "Pl": 33710, - "ĠMana": 33711, - "fed": 33712, - "Ġbri": 33713, - "Ġora": 33714, - "Gs": 33715, - "ĠGur": 33716, - "uckland": 33717, - "Ġjunction": 33718, - "Ġironic": 33719, - "ĠFeed": 33720, - "Ġprakt": 33721, - "ĠHammer": 33722, - "ĮëıĦ": 33723, - "ĠTracy": 33724, - "çµ±": 33725, - "ĠAside": 33726, - "него": 33727, - "ĠиÑģполÑĮзоваÑĤÑĮ": 33728, - "Ġzaj": 33729, - "Ġequitable": 33730, - "Ġcurb": 33731, - "ĠãģĵãĤĮ": 33732, - "Ġderivatives": 33733, - "Ġpuppies": 33734, - "ĠKenneth": 33735, - "ĠCompl": 33736, - "igram": 33737, - "ĠGarcia": 33738, - ")\"": 33739, - "ĠHarbor": 33740, - "estial": 33741, - "Ġä¾Ĩ": 33742, - "Ġers": 33743, - "æ¹": 33744, - "Ġunwanted": 33745, - "Ġbelang": 33746, - "аго": 33747, - "emb": 33748, - "dos": 33749, - "ĠìĻľë": 33750, - "ĠBudget": 33751, - "Ġbattling": 33752, - "ØŃت": 33753, - "kok": 33754, - "наÑĩала": 33755, - "Ġplag": 33756, - "Ġcantidad": 33757, - "Ġgrupos": 33758, - "Ġplugins": 33759, - "lerini": 33760, - "ĠимееÑĤ": 33761, - "Ġsozusagen": 33762, - "olics": 33763, - "Ġpueblo": 33764, - "Ġreminis": 33765, - "rän": 33766, - "ĠMorrison": 33767, - "Ġlinha": 33768, - "Ġbreaths": 33769, - "ĠTaste": 33770, - "Ġenfrent": 33771, - "ĠDocker": 33772, - "Ġден": 33773, - "Ġethnicity": 33774, - "Ġwob": 33775, - 
"Ġsuffers": 33776, - "Ġtransitioning": 33777, - "ĠRange": 33778, - "ÄĻdzy": 33779, - "ĠкаÑĤ": 33780, - "Ġsyner": 33781, - "Ġdonut": 33782, - "Ġprobabilities": 33783, - "ĠOmar": 33784, - "Which": 33785, - "uish": 33786, - "isin": 33787, - "Ġdemos": 33788, - "ĠìłĢ기": 33789, - "Ġëĺijê°Ļ": 33790, - "Ġедин": 33791, - "Ġcerve": 33792, - "Ġjoka": 33793, - "IAN": 33794, - "Ġkilometer": 33795, - "Ġhorizontally": 33796, - "ĠBhag": 33797, - "Ġ->": 33798, - "ĠMonitor": 33799, - "Ġknowledgeable": 33800, - "Ġfav": 33801, - "Ġpinned": 33802, - "ĠeBay": 33803, - "icker": 33804, - "Ġìŀłê¹IJë§Į": 33805, - "ĠXiaomi": 33806, - "Ġcapit": 33807, - "Ġnp": 33808, - "Ġ1965": 33809, - "hoe": 33810, - "Ġnok": 33811, - "ĠSage": 33812, - "ĠнелÑĮзÑı": 33813, - "ĠTow": 33814, - "gam": 33815, - "Ġdicen": 33816, - "ĠSUBSCRIBE": 33817, - "Ġreboot": 33818, - "Ġpaj": 33819, - "Ġë³´ìŬë": 33820, - "Ġthicken": 33821, - "ĠReality": 33822, - "idän": 33823, - "Na": 33824, - "Ġê²ĥìĿĢ": 33825, - "!!)": 33826, - "Ġroutines": 33827, - "Ġодного": 33828, - "Ġexting": 33829, - "Ġì¦Ŀ": 33830, - "Ġsulfur": 33831, - "Ġcarve": 33832, - "Ġasteroid": 33833, - "ĠWarrior": 33834, - "Ġphotographers": 33835, - "Ġpell": 33836, - "Ġcrossover": 33837, - "æĪijçŁ¥éģĵ": 33838, - "Ġhacemos": 33839, - "ĠNej": 33840, - "Ġsettling": 33841, - "Ġirm": 33842, - "ĠBooks": 33843, - "ientôt": 33844, - "Ġespacio": 33845, - "ĠScholars": 33846, - "Ġdoomed": 33847, - "ĠIRS": 33848, - "wohl": 33849, - "Ġsegue": 33850, - "ĠëĪĦê°Ģ": 33851, - "Ġpratic": 33852, - "BT": 33853, - "ĠConsidering": 33854, - "ĠBuffalo": 33855, - "Ġtrainings": 33856, - "Ġgebru": 33857, - "ĠGleich": 33858, - "Ġpirates": 33859, - "Ġenvelop": 33860, - "Ġreopen": 33861, - "imat": 33862, - "Ġtee": 33863, - "Ġsued": 33864, - "feh": 33865, - "Ġ×Ķק": 33866, - "Ġdiets": 33867, - "Ġjuntos": 33868, - "asto": 33869, - "Ġmisunderstood": 33870, - "Ġruim": 33871, - "Ġclassify": 33872, - "ĠпÑĢодÑĥк": 33873, - "Ġinse": 33874, - "Ġillustrated": 33875, - "Ġcorrosion": 33876, - "Ġaccred": 33877, - "ĠAuntie": 33878, - "ĠпÑĢивеÑĤ": 33879, - "ĠLIVE": 33880, - "Ġrek": 33881, - "Ġreceipt": 33882, - "åĪ°åºķ": 33883, - "ĠBarbie": 33884, - "ĠSnake": 33885, - "turn": 33886, - "Jeff": 33887, - "ãģĬãģĬ": 33888, - "ķĦ": 33889, - "VOICEOVER": 33890, - "coll": 33891, - "Ġrunners": 33892, - "ìłľë": 33893, - "osos": 33894, - "moon": 33895, - "Ġkeynote": 33896, - "ĠInstit": 33897, - "SPEAK": 33898, - "Ġplugs": 33899, - "Ġcurv": 33900, - "ĠYuri": 33901, - "ĠTheres": 33902, - "ĠPs": 33903, - "ĠμÏĢο": 33904, - "Ġconverter": 33905, - "Ġrefine": 33906, - "Ġbadass": 33907, - "Ġοι": 33908, - "Ġregen": 33909, - "azzi": 33910, - "ÙĬÙģ": 33911, - "Ġseized": 33912, - "Ġiçer": 33913, - "ilee": 33914, - "Ġupstream": 33915, - "Ġbuds": 33916, - "Ġpim": 33917, - "Ġíķĺ루": 33918, - "Ġalluded": 33919, - "Ġthemed": 33920, - "Ġconsisting": 33921, - "Ġbons": 33922, - "unuz": 33923, - "ĠпÑĢовод": 33924, - "ĠLovely": 33925, - "à¥ĭ": 33926, - "Ġparach": 33927, - "ĠStaats": 33928, - "éļĬ": 33929, - "Ġselective": 33930, - "Ġfase": 33931, - "ĠGeorget": 33932, - "Ġcocaine": 33933, - "Ġreproduction": 33934, - "ĠLara": 33935, - "ĠLD": 33936, - "Ġgh": 33937, - "Jon": 33938, - "ĠlÃ¥": 33939, - "ĠëijIJë": 33940, - "Ġtyped": 33941, - "ĠBana": 33942, - "ëĵľë": 33943, - "Ġsavory": 33944, - "ĠZomb": 33945, - "standen": 33946, - "Ġpedestrian": 33947, - "Ġdifférents": 33948, - "Ġìĭ¸": 33949, - "èī¯": 33950, - "Ġcomplained": 33951, - "ç¦ı": 33952, - "ĠÐļÑĤо": 33953, - "Ġ׾פ": 33954, - "aliÅĽmy": 33955, - "Ġmortar": 33956, - "Ġverdict": 33957, - "Ġsuficiente": 33958, - 
"ĠMillion": 33959, - "mittel": 33960, - "inals": 33961, - "ĠاÙĦØ®": 33962, - "аÑİÑģÑĮ": 33963, - "ĠmiÄĻdzy": 33964, - "ĠOle": 33965, - "Ġinvert": 33966, - "czyÄĩ": 33967, - "озможно": 33968, - "starter": 33969, - "Ġauditor": 33970, - "ĠScout": 33971, - "chien": 33972, - "ĠSverige": 33973, - "uffled": 33974, - "Ġzehn": 33975, - "ĠAuckland": 33976, - "Ġargent": 33977, - "Ġ1976": 33978, - "ĠHoe": 33979, - "Ġbothers": 33980, - "Ġsocialist": 33981, - "Ġpliers": 33982, - "Ġemergen": 33983, - "ĠXP": 33984, - "еÑĢов": 33985, - "More": 33986, - "ĠLevi": 33987, - "ĠAnders": 33988, - "ibilidad": 33989, - "ĠParents": 33990, - "Ġinduced": 33991, - "ìĸ´ì¤": 33992, - "Ġbalances": 33993, - "ĠвÑĭÑĪ": 33994, - "Ġsubmarine": 33995, - "Start": 33996, - "Ġdries": 33997, - "Ġvolver": 33998, - "Ġticking": 33999, - "cott": 34000, - "Ġfaj": 34001, - "prés": 34002, - "ĠSabb": 34003, - "ĠзаÑĩ": 34004, - "ĠпокÑĥп": 34005, - "Ġbaptized": 34006, - "ĠBrilliant": 34007, - "ĠÐijог": 34008, - "Ġmots": 34009, - "bits": 34010, - "Ġlattice": 34011, - "æĪijè·Łä½ł": 34012, - "Ġcoriander": 34013, - "Ġresidency": 34014, - "ync": 34015, - "Ġpierwszy": 34016, - "ĠKnock": 34017, - "ĠZap": 34018, - "ĠÐķв": 34019, - "견": 34020, - "å°ıå¿ĥ": 34021, - "Ġuneven": 34022, - "ĠJas": 34023, - "odor": 34024, - "ç¿Ĵ": 34025, - "74": 34026, - "ĠSite": 34027, - "Ġaconteceu": 34028, - "ympt": 34029, - "Ġtrilogy": 34030, - "Ġlantern": 34031, - "ĠZucker": 34032, - "vari": 34033, - "welling": 34034, - "ĠPotato": 34035, - "gomery": 34036, - "Ġreacted": 34037, - "ĠChron": 34038, - "Ġjede": 34039, - "beeld": 34040, - "Ġtwent": 34041, - "Ġlact": 34042, - "æ¨Ĥ": 34043, - "Ġrése": 34044, - "Ġrelent": 34045, - "Ġfurnace": 34046, - "Ġwidget": 34047, - "Ġearthquakes": 34048, - "ĠAdjust": 34049, - "ilit": 34050, - "ĠØ£ÙĪ": 34051, - "Ġhearings": 34052, - "Ġdefendant": 34053, - "irsiniz": 34054, - "Ġbask": 34055, - "cja": 34056, - "ľ¨": 34057, - "Ġrifles": 34058, - "Ġinstal": 34059, - "ĠForgive": 34060, - "pical": 34061, - "ĠÐŀÑĩенÑĮ": 34062, - "Ġpetites": 34063, - "Ġhp": 34064, - "Ġrenowned": 34065, - "ĠInn": 34066, - "Ġ주ìĦ¸ìļĶ": 34067, - "Ġemphasized": 34068, - "éĹ®é¢ĺ": 34069, - "ĠìŀĪì£ł": 34070, - "Ġê²ĥìľ¼ë¡ľ": 34071, - "ãĤĨ": 34072, - "Åĵ": 34073, - "gili": 34074, - "Dave": 34075, - "Ġexhausting": 34076, - "ÅĤug": 34077, - "Ġschema": 34078, - "μά": 34079, - "cycl": 34080, - "Ġautant": 34081, - "Ġparcel": 34082, - "Ġmateria": 34083, - "ĠBerry": 34084, - "ĠÑģами": 34085, - "Ġextracted": 34086, - "ĠSaying": 34087, - "ismatic": 34088, - "ĠпопÑĢоб": 34089, - "Ġneuron": 34090, - "graph": 34091, - "ľë©´": 34092, - "Ġenclosure": 34093, - "ĠJohann": 34094, - "Ġaftermath": 34095, - "ÑĤоб": 34096, - "Ġuży": 34097, - "Ġsamp": 34098, - "360": 34099, - "ĠMei": 34100, - "Ġtaco": 34101, - "Ġreceptors": 34102, - "Ġpunches": 34103, - "ĠHoje": 34104, - "ĠÙĩÙĨا": 34105, - "=\"#": 34106, - "ĠAngular": 34107, - "Ġmusique": 34108, - "Ġrol": 34109, - "Ġñ": 34110, - "sterreich": 34111, - "Ġclam": 34112, - "ĠTreasury": 34113, - "chemical": 34114, - "Ġapar": 34115, - "Ġappend": 34116, - "Ġforbid": 34117, - "ĠHamburg": 34118, - "аков": 34119, - "Ġê¸Ī": 34120, - "ilda": 34121, - "Ġpreparations": 34122, - "ĠmogÄħ": 34123, - "Ġcamino": 34124, - "Eric": 34125, - "ĠBlind": 34126, - "èĪĩ": 34127, - "å¹´çļĦ": 34128, - "ĠDiscovery": 34129, - "ì¸ł": 34130, - "çĪ¶": 34131, - "Ġinterpreter": 34132, - "Ġbred": 34133, - "ĠPsalm": 34134, - "Ġdefended": 34135, - "ìī¬": 34136, - "ĠErfahr": 34137, - "ĠPeach": 34138, - "Ġmoons": 34139, - "ĠOst": 34140, - "Ġspécial": 34141, - "Ġarriver": 34142, - 
"ĠWis": 34143, - "uci": 34144, - "Ġrobotics": 34145, - "IVE": 34146, - "Ġsiege": 34147, - "arla": 34148, - "Ġseparates": 34149, - "ĠTC": 34150, - "íı°": 34151, - "quisite": 34152, - "Ġparentheses": 34153, - "ике": 34154, - "ç«Ļ": 34155, - "Ġtrous": 34156, - "建": 34157, - "ĠÑģилÑĮ": 34158, - "Ġbeers": 34159, - "ĠплаÑĤ": 34160, - "ãģĻãģĶãģĦ": 34161, - "Ġsola": 34162, - "Ġdès": 34163, - "mingham": 34164, - "ikte": 34165, - "Ġoops": 34166, - "Ġtwitch": 34167, - "å°ĩ": 34168, - "ÏĪ": 34169, - "ĠShouldn": 34170, - "uvre": 34171, - "Ġleer": 34172, - "criptions": 34173, - "Ġeyeshadow": 34174, - "ĠGuo": 34175, - "ĠPowell": 34176, - "Ġsupuesto": 34177, - "Ġana": 34178, - "rals": 34179, - "ĠMontreal": 34180, - "Ġsurfing": 34181, - "ĠÐŁÐµÑĢв": 34182, - "×ŀ×ķ": 34183, - "Ġmilliseconds": 34184, - "Ġsuburbs": 34185, - "Ġplaneta": 34186, - "ÑĥÑĪка": 34187, - "hrlich": 34188, - "ĠHY": 34189, - "ĠسÛĴ": 34190, - "ĠMM": 34191, - "ĠEff": 34192, - "åı¯æĦĽ": 34193, - "ĠHS": 34194, - "anson": 34195, - "Ġì§ģìłij": 34196, - "Ġsuo": 34197, - "Ġdeploying": 34198, - "Ġkunt": 34199, - "tering": 34200, - "Ġerect": 34201, - "ìŀ¥ìĿ´": 34202, - "ĠìĿĮìĭĿ": 34203, - "Ġspecimen": 34204, - "!...": 34205, - "æĪij說": 34206, - "Ġligne": 34207, - "Ġkonst": 34208, - "adequ": 34209, - "Ġìĥģíĥľ": 34210, - "Ġaccessed": 34211, - "ĠPole": 34212, - "kill": 34213, - "Ġë²Ħë": 34214, - "Ġauthenticity": 34215, - "Ġappelle": 34216, - "ulle": 34217, - "Ġrevision": 34218, - "Ġgoats": 34219, - "гли": 34220, - "Ġpau": 34221, - "ĠRanger": 34222, - "ĠImag": 34223, - "author": 34224, - "Ġeve": 34225, - "ĠMessenger": 34226, - "Ġnay": 34227, - "Ġwholes": 34228, - "ätte": 34229, - "Ġonwards": 34230, - "ĠDepois": 34231, - "ĠíijľíĺĦ": 34232, - "ĠSARS": 34233, - "Ġwszystkich": 34234, - "Ġdestru": 34235, - "umbing": 34236, - "Ġcompatibility": 34237, - "Ġmisinformation": 34238, - "odore": 34239, - "ĠFavor": 34240, - "eko": 34241, - "ıĮ": 34242, - "waukee": 34243, - "ĠTeaching": 34244, - "ĠKO": 34245, - "Ġbetting": 34246, - "Ġquests": 34247, - "Ġvivre": 34248, - "ĠмÑĥзÑĭ": 34249, - "Ġsaga": 34250, - "Ġswell": 34251, - "Ġgehe": 34252, - "æĢİ麼樣": 34253, - "ĠоÑĢганиз": 34254, - "Ġgide": 34255, - "ĠGross": 34256, - "Ġdalej": 34257, - "Ġclaws": 34258, - "á»Ļc": 34259, - "Ġprejudice": 34260, - "Ġinsign": 34261, - "ihood": 34262, - "Ġpled": 34263, - "Ġdónde": 34264, - "ĠPolitical": 34265, - "Ġpremises": 34266, - "undert": 34267, - "عت": 34268, - "onnen": 34269, - "Ġespaço": 34270, - "Ġfé": 34271, - "ĠHarrison": 34272, - "ĠCensus": 34273, - "Ġcardio": 34274, - "Ġdiy": 34275, - "Ġmilieu": 34276, - "Ġjournée": 34277, - "ĠRelease": 34278, - "NIE": 34279, - "ĠMuk": 34280, - "idée": 34281, - "á»įi": 34282, - "Ġiçinde": 34283, - "ŀĻ": 34284, - "Ġresonate": 34285, - "Ġmoles": 34286, - "ĠFlying": 34287, - "ĠGloria": 34288, - "ĠPastor": 34289, - "ĠArena": 34290, - "好ä¸į好": 34291, - "NON": 34292, - "олов": 34293, - "ĠallÃŃ": 34294, - "omat": 34295, - "ìĸ´ëıĦ": 34296, - "ĠcaracterÃŃst": 34297, - "Ġdeclining": 34298, - "ÑĸÑı": 34299, - "anco": 34300, - "ĠInform": 34301, - "Ġbargain": 34302, - "Ġbushes": 34303, - "ĠNaturally": 34304, - "Ġrechts": 34305, - "ĠTensor": 34306, - "ĠPatricia": 34307, - "Ġprincipio": 34308, - "ĠMumbai": 34309, - "Ġwomb": 34310, - "Ġnostra": 34311, - "Ġdilemma": 34312, - "Ġirgendwann": 34313, - "Ġ1964": 34314, - "ĠenergÃŃa": 34315, - "ĠнаÑĢ": 34316, - "Ġsegregation": 34317, - "ĠAthlet": 34318, - "Ġ»,": 34319, - "Ġyeni": 34320, - "ĠSeit": 34321, - "Ġvenom": 34322, - "Ġdakika": 34323, - "ĠëıĮë": 34324, - "ĠÃīl": 34325, - "Ġfus": 34326, - "ĠMog": 34327, - 
"¦½ëĭĪëĭ¤": 34328, - "Ġremar": 34329, - "ĠTeddy": 34330, - "Ġbreasts": 34331, - "icans": 34332, - "æĶ¶çľĭ": 34333, - "kap": 34334, - "ĠhÆ¡n": 34335, - "ĠJP": 34336, - "ãĥ³ãĤ¿": 34337, - "Ġresurrect": 34338, - "ĠìĿ¸ë": 34339, - "herical": 34340, - "Ġfotograf": 34341, - "ĠJosé": 34342, - "Ġlivelihood": 34343, - "Ġbibli": 34344, - "teri": 34345, - "Ġvorstellen": 34346, - "ĠAAA": 34347, - "Ġassessing": 34348, - "YA": 34349, - "Ġsplend": 34350, - "Ġexcav": 34351, - "Ġbaptism": 34352, - "yll": 34353, - "wow": 34354, - "Mac": 34355, - "Ġplastics": 34356, - "teokbokki": 34357, - "Ġintéressant": 34358, - "Ġcommanded": 34359, - "Ġfamously": 34360, - "ĠÐĺли": 34361, - "ĠManuel": 34362, - "Ġsouthwest": 34363, - "Ġdeformation": 34364, - "ÃŃculo": 34365, - "ĠнаÑħодиÑĤÑģÑı": 34366, - "ĠPatter": 34367, - "degree": 34368, - "ĠczÄĻsto": 34369, - "\"-": 34370, - "Ġìħĭ": 34371, - "Ġmanger": 34372, - "ĠTrustee": 34373, - "Ģ리": 34374, - "Ġpuntos": 34375, - "ivable": 34376, - "Ġvolatile": 34377, - "ĠëĬIJ": 34378, - "Ġinstability": 34379, - "Ġciel": 34380, - "ciÄħ": 34381, - "Ġpurity": 34382, - "ноÑģÑĤ": 34383, - "Sil": 34384, - "edar": 34385, - "åĻ¨": 34386, - "NOUNCER": 34387, - "Ġspelled": 34388, - "GER": 34389, - "Ġsanctuary": 34390, - "Ġaccelerating": 34391, - "Ġscout": 34392, - "ĠпÑĢев": 34393, - "fahren": 34394, - "ãģĵãģ¡ãĤī": 34395, - "ĠëĤĺìĺ¨": 34396, - "ĠpoczÄħt": 34397, - "ĠMeu": 34398, - "kaar": 34399, - "³´ê³ł": 34400, - "akra": 34401, - "Down": 34402, - "ĠÃĦr": 34403, - "ĠElite": 34404, - "Ġallons": 34405, - "Ġmayonnaise": 34406, - "ĠSustain": 34407, - "prisingly": 34408, - "Ġsupervis": 34409, - "Ġê·¸ëłĩì£ł": 34410, - "Ġunemployed": 34411, - "Ġfreshly": 34412, - "Ġ×ŀ×¢": 34413, - "ĠDh": 34414, - "Ġtackling": 34415, - "Ġogr": 34416, - "Ġì´Īë": 34417, - "ãĤĪãĤį": 34418, - "Ġloft": 34419, - "arah": 34420, - "ĠAirl": 34421, - "ĠDir": 34422, - "ĠÐľÐ¾Ð¶Ð½Ð¾": 34423, - "Ġbooking": 34424, - "ĠCRA": 34425, - "Ġhttps": 34426, - "Ġchoke": 34427, - "Ġgown": 34428, - "Ġnoite": 34429, - "Ġzac": 34430, - "istol": 34431, - "Ġsecre": 34432, - "Ġresembles": 34433, - "Ġcuad": 34434, - "ìĤ¬ê°Ģ": 34435, - "show": 34436, - "Ġblanc": 34437, - "Ġagu": 34438, - "ĠPrint": 34439, - "asted": 34440, - "ĠWeather": 34441, - "ipl": 34442, - "Ġobscure": 34443, - "Ġconte": 34444, - "oughs": 34445, - ");": 34446, - "ĠDame": 34447, - "ä¸Ģ缴": 34448, - "Ġclarification": 34449, - "Ġintimacy": 34450, - "Ġuphold": 34451, - "ĠMirror": 34452, - "Ġwagon": 34453, - "xide": 34454, - "Ġclog": 34455, - "apper": 34456, - "ĠImmediately": 34457, - "úde": 34458, - "Ġtouchdown": 34459, - "Ġrooft": 34460, - "аÑĪа": 34461, - "Ġçıkt": 34462, - "Ġlaisser": 34463, - "ĠUnreal": 34464, - "ensitive": 34465, - "Ġ123": 34466, - "Ġplaster": 34467, - "Ġducks": 34468, - "Ġetme": 34469, - "Ġbishop": 34470, - "brevi": 34471, - "Ġbic": 34472, - "ä¸ĭåİ»": 34473, - "Ġruntime": 34474, - "Ġambitions": 34475, - "маÑĤ": 34476, - "ĠWein": 34477, - "ĠMari": 34478, - "ĠíĬ¸ë": 34479, - "Ġresolver": 34480, - "ĠngÃły": 34481, - "ĠRise": 34482, - "ãĤĪãģĨãģ«": 34483, - "ĠCrus": 34484, - "Ġmerchandise": 34485, - "Ġeli": 34486, - "Ġstatewide": 34487, - "Ġowl": 34488, - "éģł": 34489, - "æĶ¹": 34490, - "Ġtwisting": 34491, - "Ġcontaminated": 34492, - "ĠCommerce": 34493, - "hythm": 34494, - "ĠÃĪ": 34495, - "Ġìĭ¤ë": 34496, - "Ġmusste": 34497, - "uir": 34498, - "Ġsums": 34499, - "ĠSomewhere": 34500, - "ãĥİ": 34501, - "Ġkami": 34502, - "Ġaired": 34503, - "ĠANDREW": 34504, - "Ġêº": 34505, - "Ġviendo": 34506, - "Ġantibody": 34507, - "Ġabsolument": 34508, - "Ġprotesters": 34509, - "ĠQuébec": 
34510, - "stadt": 34511, - "Shaun": 34512, - "Ġchambers": 34513, - "ĠWear": 34514, - "ĠEffects": 34515, - "Ġhazards": 34516, - "Ġnei": 34517, - "Ġcorazón": 34518, - "Ġá¼": 34519, - "ĠSG": 34520, - "Ķ©": 34521, - "ĠìĹŃìĭľ": 34522, - "Ġcomfy": 34523, - "ĠCody": 34524, - "Ġpensando": 34525, - "Ġganska": 34526, - "ĠAcross": 34527, - "öllig": 34528, - "abyte": 34529, - "Ġwedge": 34530, - "Ġkalian": 34531, - "Ġsigue": 34532, - "endes": 34533, - "ĠGroÃŁ": 34534, - "Ġutiliser": 34535, - "Ġflown": 34536, - "аниÑİ": 34537, - "Ġlevar": 34538, - "restrial": 34539, - "Ġillustrations": 34540, - "Ġaslında": 34541, - "BLEEP": 34542, - "ĠдоÑģÑĤ": 34543, - "Ġturret": 34544, - "Ġsuitcase": 34545, - "ziÄĻki": 34546, - "Ġsketches": 34547, - "Ġacred": 34548, - "ĠRei": 34549, - "Ġtsun": 34550, - "ĠSag": 34551, - "Ġthirds": 34552, - "ĠKIRBY": 34553, - "rai": 34554, - "Ġhumanos": 34555, - "Ġrecommends": 34556, - "Ġextraordinarily": 34557, - "Ġcommencement": 34558, - "KN": 34559, - "opez": 34560, - "Ġ×ijש": 34561, - "Ġlethal": 34562, - "ĠEstamos": 34563, - "Ġinspector": 34564, - "ĠSeok": 34565, - "eun": 34566, - "Ġoffshore": 34567, - "Ġgettin": 34568, - "years": 34569, - "ĠSilence": 34570, - "ĠNatur": 34571, - "upun": 34572, - "Ġtrzy": 34573, - "Ġnoget": 34574, - "Ġhamburger": 34575, - "ĠPraise": 34576, - "énd": 34577, - "Ġ1971": 34578, - "ylie": 34579, - "krit": 34580, - "ĠìĥĿê°ģìĿ´": 34581, - "çļ®": 34582, - "Ġmomentos": 34583, - "Ġesté": 34584, - "Ġdissemin": 34585, - "Ġgigs": 34586, - "Ġdesaf": 34587, - "Ġavis": 34588, - "ĠZoo": 34589, - "ĠìķĬìĿĢ": 34590, - "häng": 34591, - "åı¥": 34592, - "hake": 34593, - "ĠBism": 34594, - "Ġrethink": 34595, - "ĠMalcolm": 34596, - "Ġidentifies": 34597, - "lower": 34598, - "ixel": 34599, - "ĠtvÃ¥": 34600, - "ked": 34601, - "ierz": 34602, - "Ġöffentlich": 34603, - "Ġproclaim": 34604, - "soon": 34605, - "lol": 34606, - "Ġloi": 34607, - "Ġbitten": 34608, - "rollo": 34609, - "Ġsermon": 34610, - "Ġesqu": 34611, - "Ġjackets": 34612, - "Ġgráfic": 34613, - "ĠпоказÑĭв": 34614, - "Ġcabeza": 34615, - "chodzi": 34616, - "Ġpelvis": 34617, - "Ġnostalgia": 34618, - "Ġbrew": 34619, - "Ġshortcuts": 34620, - "ĠAdemás": 34621, - "Ġsuperficial": 34622, - "åħ©åĢĭ": 34623, - "Ġboca": 34624, - "ĠæĪijæĺ¯": 34625, - "imentos": 34626, - "åĽłä¸º": 34627, - "Ġsprouts": 34628, - "é£Ľ": 34629, - "ĠJonas": 34630, - "ĠFlorence": 34631, - "static": 34632, - "daughter": 34633, - "*)": 34634, - "ÅĤby": 34635, - "fashion": 34636, - "ĠGinger": 34637, - "Ġ매ë": 34638, - "Ġhustle": 34639, - "utos": 34640, - "ĠÑĤÑıж": 34641, - "ĠLös": 34642, - "ש×Ļ×Ŀ": 34643, - "anych": 34644, - "tuber": 34645, - "Ġtidy": 34646, - "Ġfrontal": 34647, - "Ġwhiskey": 34648, - "Ġhumid": 34649, - "ĠÎŁ": 34650, - "Ġridge": 34651, - "Ġmarin": 34652, - "Ġbientôt": 34653, - "ĠCarrie": 34654, - "chw": 34655, - "Ġtahun": 34656, - "ĠErgeb": 34657, - "FR": 34658, - "Ġìłķë¶Ģ": 34659, - "ĠSoldier": 34660, - "Ġenlightenment": 34661, - "Ġexamining": 34662, - "ĠNotre": 34663, - "Ġeram": 34664, - "ĠSunny": 34665, - "Ġlayered": 34666, - "ĠDazu": 34667, - "rades": 34668, - "好åIJĥ": 34669, - "ĠнаÑĪей": 34670, - "Ġtimber": 34671, - "Ġmanners": 34672, - "ĠBirmingham": 34673, - "Ġminiature": 34674, - "ometers": 34675, - "Ġfiller": 34676, - "ĠRip": 34677, - "ĠKomb": 34678, - "owner": 34679, - "ì¿": 34680, - "idian": 34681, - "Ġdemás": 34682, - "ĠÙĪت": 34683, - "Ġprecautions": 34684, - "Ġgoverno": 34685, - "zelf": 34686, - "ĠComplete": 34687, - "å¸ĥ": 34688, - "ĠPhantom": 34689, - "ãģ¾ãģļ": 34690, - "Ġнез": 34691, - "ĠкаÑĢÑĤ": 34692, - "ĠAntwort": 34693, - 
"ĠPfizer": 34694, - "ĠFranco": 34695, - "ĠwÅĤ": 34696, - "Ġfrig": 34697, - "esper": 34698, - "Ġkale": 34699, - "Ġfilmmaker": 34700, - "Ġkurt": 34701, - "Ġinvalid": 34702, - "å±Ģ": 34703, - "arella": 34704, - "Äĥng": 34705, - "ramento": 34706, - "Ġnutritional": 34707, - "Ġdictators": 34708, - "Ġafin": 34709, - "Ġfuzzy": 34710, - "ĠGina": 34711, - "ót": 34712, - "ĠExtremadura": 34713, - "Ġdemonstrations": 34714, - "ĠMontgomery": 34715, - "íķ´ìĦ¤": 34716, - "ĠGandhi": 34717, - "ãĥĿ": 34718, - "ç½®": 34719, - "Ġreunion": 34720, - "ĠjakiÅĽ": 34721, - "ĠZug": 34722, - "OUGH": 34723, - "lifting": 34724, - "Ġà²": 34725, - "á¹Ľá¹£": 34726, - "eb": 34727, - "ĠWOW": 34728, - "ĠShiva": 34729, - "ometry": 34730, - "Ġwildly": 34731, - "Ġtended": 34732, - "Ġmegap": 34733, - "ì²ĺ": 34734, - "Ġnause": 34735, - "Ġgerek": 34736, - "ãĥĭ": 34737, - "ĠMarcel": 34738, - "Ġneste": 34739, - "خر": 34740, - "Ġfeh": 34741, - "åĨħ": 34742, - "suspenseful": 34743, - "ĠWrestle": 34744, - "ĠPalestinians": 34745, - "ĠGORD": 34746, - "iyet": 34747, - "ĠÑĢади": 34748, - "Ġversuchen": 34749, - "Ġtransistor": 34750, - "ĠÐŁÑĢоÑģÑĤо": 34751, - "ĠпонÑĢав": 34752, - "Ġrhyme": 34753, - "ĠVermont": 34754, - "platz": 34755, - "è®°": 34756, - "ĠÄ°ÅŁte": 34757, - "ĠHag": 34758, - "ĠÐĺм": 34759, - "ĠÑĢаÑģÑģказ": 34760, - "Ġmetros": 34761, - "ĠInfinity": 34762, - "wolf": 34763, - "ibal": 34764, - "ftig": 34765, - "ĠÚĨ": 34766, - "Ġíĺ¹ìĭľ": 34767, - "Ġoggi": 34768, - "Ġdisposit": 34769, - "ĠпÑĢил": 34770, - "ĠвÑĭпол": 34771, - "Ġthôi": 34772, - "ĠKENN": 34773, - "Ġhanding": 34774, - "actus": 34775, - "Ġtacos": 34776, - "Ġformerly": 34777, - "ĠCorinthians": 34778, - "ãģ«ãģ¯": 34779, - "ÑĨÑĸÑĹ": 34780, - "Ġpadre": 34781, - "Ġcongregation": 34782, - "æij": 34783, - "fert": 34784, - "Ġsubir": 34785, - "aiser": 34786, - "qua": 34787, - "araoh": 34788, - "ĠCurry": 34789, - "ĠìķĬëĬĶ": 34790, - "елÑİ": 34791, - "Ġfuss": 34792, - "Ġbooty": 34793, - "Ġlows": 34794, - "Ġhommes": 34795, - "ĠMH": 34796, - "ĠDisneyland": 34797, - "went": 34798, - "Ġresidue": 34799, - "Ġbeeping": 34800, - "è¼ķ": 34801, - "ätta": 34802, - "Ġmould": 34803, - "ĠProjekt": 34804, - "stalk": 34805, - "Ġartifact": 34806, - "ĠAntrag": 34807, - "ĠAMD": 34808, - "ĠCrypt": 34809, - "Ġë©Ķ": 34810, - "ĠFelipe": 34811, - "ĠCOB": 34812, - "elu": 34813, - "Ġselfies": 34814, - "ĠSanti": 34815, - "chutz": 34816, - "ĠУкÑĢаÑĹ": 34817, - "gesamt": 34818, - "Ġflock": 34819, - "jaz": 34820, - "plain": 34821, - "Ġwrinkles": 34822, - "Ġreais": 34823, - "Ġpaljon": 34824, - "Ġempowerment": 34825, - "Ġattendees": 34826, - "ppa": 34827, - "Ġneden": 34828, - "онÑĭ": 34829, - "Ġtimeframe": 34830, - "ĠCherry": 34831, - "Ġidée": 34832, - "Ġgag": 34833, - "Ġdonkey": 34834, - "Ġông": 34835, - "ĠHare": 34836, - "éļĽ": 34837, - "ĠKara": 34838, - "Ġacompan": 34839, - "places": 34840, - "imientos": 34841, - "ĠHamm": 34842, - "би": 34843, - "uben": 34844, - "iliyor": 34845, - "Ġthirst": 34846, - "Ġkry": 34847, - "ĠGeorgetown": 34848, - "׳×Ķ": 34849, - "Ġorch": 34850, - "Ġheartbeat": 34851, - "Ġtransformations": 34852, - "estones": 34853, - "ĠKH": 34854, - "Ġcartoons": 34855, - "Ġanci": 34856, - "Ġworthless": 34857, - "Ġtailored": 34858, - "pu": 34859, - "Americans": 34860, - "Ġpiles": 34861, - "ĠMonkey": 34862, - "Ġbasin": 34863, - "ĠTemper": 34864, - "ĠPaint": 34865, - "Ġpunching": 34866, - "Ġbaik": 34867, - "ĠOakland": 34868, - "vre": 34869, - "ÅŁallah": 34870, - "ydd": 34871, - "Ġcasually": 34872, - "odu": 34873, - "Ġcoded": 34874, - "ĠNorwegian": 34875, - "ĠVince": 34876, - "Ġpremature": 34877, - 
"ĠPromise": 34878, - "екÑģÑĤ": 34879, - "Ġdevastated": 34880, - "ĠPremium": 34881, - "ĠParam": 34882, - "ĠÃĸyle": 34883, - "umuz": 34884, - "PO": 34885, - "rators": 34886, - "Ġlamps": 34887, - "Ġterritorial": 34888, - "Ġbackbone": 34889, - "listed": 34890, - "DY": 34891, - "ĠاÙĦر": 34892, - "Ġpursued": 34893, - "ĠCommons": 34894, - "Ġ곡": 34895, - "locks": 34896, - "edor": 34897, - "Ġconceived": 34898, - "gere": 34899, - "Ġdisappearing": 34900, - "ĠSull": 34901, - "ĠìĹ°ë": 34902, - "Ġhoffe": 34903, - "Ġdetox": 34904, - "íĶĮ": 34905, - "Ġretir": 34906, - "ĠëģĿëĤ": 34907, - "Ġpergunta": 34908, - "ĠBOY": 34909, - "ç²¾": 34910, - "Ġpenn": 34911, - "æĿ¥äºĨ": 34912, - "hés": 34913, - "hon": 34914, - "Ġcatastrophic": 34915, - "Ġaust": 34916, - "Ġtorso": 34917, - "Ġìĸ´ëĬIJ": 34918, - "ĠìĤ¬ëŀĮëĵ¤ìĿ´": 34919, - "Ġmarvelous": 34920, - "ĠHarley": 34921, - "achine": 34922, - "Ġtiế": 34923, - "itto": 34924, - "ĠIÃŃm": 34925, - "ylon": 34926, - "Ġshutdown": 34927, - ".''": 34928, - "Ġapologies": 34929, - "ĠCommunication": 34930, - "ĠговоÑĢÑİ": 34931, - "ãģĤãĥ¼": 34932, - "âĦ¢": 34933, - "ÃŃveis": 34934, - "acun": 34935, - "Ġretaining": 34936, - "Ġcontradiction": 34937, - "ĠADAM": 34938, - "COM": 34939, - "Bryan": 34940, - "ĠMonsieur": 34941, - "Ġadapting": 34942, - "ШÐIJ": 34943, - "ĠScr": 34944, - "ändert": 34945, - "Ġplaus": 34946, - "ä»Ĭ天çļĦ": 34947, - "Ġonset": 34948, - "Ġassistants": 34949, - "Ġvalves": 34950, - "Ġscatter": 34951, - "ĠRust": 34952, - "awia": 34953, - "Ġreadiness": 34954, - "Ġpais": 34955, - "Ġbible": 34956, - "Ġambiente": 34957, - "ĠамеÑĢик": 34958, - "Ġuncond": 34959, - "Ġkalk": 34960, - "åĬ¨": 34961, - "Ġmoc": 34962, - "unn": 34963, - "Ġactu": 34964, - "Ġhumming": 34965, - "issimo": 34966, - "ĠPatrol": 34967, - "gow": 34968, - "ãĥ¤": 34969, - "ĠTHEY": 34970, - "ĠBoden": 34971, - "ĠBie": 34972, - "Ġreel": 34973, - "ĠÑĥÑģлов": 34974, - "Ġendeavor": 34975, - "ĠPeriod": 34976, - "ustomed": 34977, - "mals": 34978, - "alon": 34979, - "Box": 34980, - "ĠÏĥαÏĤ": 34981, - "Ġomdat": 34982, - "Ġaltre": 34983, - "ĠHeh": 34984, - "kad": 34985, - "Ġprotector": 34986, - "Ġdominance": 34987, - "odynamic": 34988, - "Ġcommunicated": 34989, - "kö": 34990, - "Ġpredecessor": 34991, - "ĠLuk": 34992, - "ĠFlower": 34993, - "Ġãģ©": 34994, - "poque": 34995, - "ÑĤиÑĢов": 34996, - "Ġretrospect": 34997, - "Ġdecisive": 34998, - "Ġexempel": 34999, - "{\\": 35000, - "ĠRück": 35001, - "rite": 35002, - "ĠZeus": 35003, - "Ġcalorie": 35004, - "Ġattractions": 35005, - "ĠHinter": 35006, - "Ġuhm": 35007, - "ĠíĮIJ": 35008, - "Ġrulers": 35009, - "Ġdiscouraged": 35010, - "Ġacontecer": 35011, - "Ġaccents": 35012, - "ĠOptim": 35013, - "ĠAlg": 35014, - "kids": 35015, - "2021": 35016, - "ĠLindsay": 35017, - "Ġfilmmakers": 35018, - "prowad": 35019, - "Ġterug": 35020, - "ëĭ´": 35021, - "ĠSommer": 35022, - "2018": 35023, - "Ġborrowing": 35024, - "ĠTransfer": 35025, - "ноп": 35026, - "arias": 35027, - "Ġheadphone": 35028, - "ì¼ľ": 35029, - "Ġtranslating": 35030, - "Ġaufge": 35031, - "à®ªà®Ł": 35032, - "weis": 35033, - "avant": 35034, - "paid": 35035, - "baby": 35036, - "Ġtoughest": 35037, - "Ġrepeats": 35038, - "ĠTeresa": 35039, - "Lord": 35040, - "Ġacabar": 35041, - "ĠRide": 35042, - "dir": 35043, - "Ġleng": 35044, - "Ġdwa": 35045, - "Ġheadaches": 35046, - "Ġnữa": 35047, - "ĠнаÑģÑĤоÑıÑī": 35048, - "Ġboils": 35049, - "Ġlonging": 35050, - "rias": 35051, - "ório": 35052, - "ĠParadise": 35053, - "ĠSeñor": 35054, - "erdem": 35055, - "Ġreinst": 35056, - "Ġsalaries": 35057, - "Ġinsecurity": 35058, - "ÅĤoÅĽci": 35059, - "ĠабÑģолÑİÑĤно": 
35060, - "inken": 35061, - "ĠEddy": 35062, - "udos": 35063, - "Ġdummy": 35064, - "Ðļак": 35065, - "six": 35066, - "Ġinbox": 35067, - "ẩ": 35068, - "People": 35069, - "á»ĵng": 35070, - "Ġorganizers": 35071, - "find": 35072, - "Ġül": 35073, - "ĠCOM": 35074, - "ża": 35075, - "weile": 35076, - "Commentary": 35077, - "íĬ¸ë¥¼": 35078, - "ĠMittel": 35079, - "kus": 35080, - "èĽĭ": 35081, - "न": 35082, - "iral": 35083, - "Ġgarment": 35084, - "ικά": 35085, - "Ġstool": 35086, - "payers": 35087, - "Ġshimmer": 35088, - "ĠOllie": 35089, - "ĠJeżeli": 35090, - "è¿ĺæľī": 35091, - "Ġ1977": 35092, - "Ġjeux": 35093, - "Ġextinct": 35094, - "ĠTransportation": 35095, - "ĠMaker": 35096, - "Ġjohn": 35097, - "Ġrichest": 35098, - "Ġtraumat": 35099, - "Ġliegen": 35100, - "´ë¥¼": 35101, - "è¿ĻéĩĮ": 35102, - "Ġunrest": 35103, - "ĠStraw": 35104, - "æĭľæĭľ": 35105, - "Ġcoma": 35106, - "ĠKristen": 35107, - "ĠÐļонеÑĩно": 35108, - "ĠBryce": 35109, - "ĠÑıкÑĸ": 35110, - "Ġpearls": 35111, - "ĠпонимаÑİ": 35112, - "Ġadditions": 35113, - "Ġasympt": 35114, - "ĠменÑĮÑĪе": 35115, - "Ġscans": 35116, - "Child": 35117, - "ĠHide": 35118, - "кÑĥÑİ": 35119, - "etas": 35120, - "Ġdank": 35121, - "Ġpleas": 35122, - "Ġessays": 35123, - "Ġjets": 35124, - "åħĴ": 35125, - "Ġвед": 35126, - "Ġpositives": 35127, - "hof": 35128, - "-)": 35129, - "zzo": 35130, - "Ġstarters": 35131, - "Ġsmiled": 35132, - "Ġ1944": 35133, - "quiera": 35134, - "Ġrok": 35135, - "Ġpuesto": 35136, - "Nico": 35137, - "Ġsimulations": 35138, - "Ġà¶": 35139, - "Ġintrigued": 35140, - "ĠOverwatch": 35141, - "åĸĤ": 35142, - "sigh": 35143, - "bai": 35144, - "Ġë§IJê³ł": 35145, - "idé": 35146, - "Ġcrabs": 35147, - "áºŃp": 35148, - "ĠIraqi": 35149, - "ìĿ´ë¥¼": 35150, - "ÑĤÑı": 35151, - "ĠSophia": 35152, - "ĠDNS": 35153, - "Ġönemli": 35154, - "ĠLuo": 35155, - "Ŀ¤": 35156, - "ĠCounsel": 35157, - "ligen": 35158, - "анÑĮÑĪе": 35159, - "Ġtrumpet": 35160, - "Ġdapat": 35161, - "ĠJM": 35162, - "ĠEVERY": 35163, - "Ġå°įä¸įå°į": 35164, - "夢": 35165, - "ĠLayer": 35166, - "Ġcô": 35167, - "нал": 35168, - "ĠJoo": 35169, - "ĠHack": 35170, - "Ġsunt": 35171, - "ĠLeonard": 35172, - "ĠFirebase": 35173, - "änger": 35174, - "Ġexploding": 35175, - "voy": 35176, - "Ġì¦IJ": 35177, - "ĠÑģеÑĢÑĮ": 35178, - "Ġseverity": 35179, - "Ġbestimm": 35180, - "çµIJæŀľ": 35181, - "Ġtiring": 35182, - "Ġprocurement": 35183, - "Ġdiplomacy": 35184, - "Ġdecorative": 35185, - "ĠÙĬا": 35186, - "Ġpenetration": 35187, - "Õ«": 35188, - "Ġoutright": 35189, - "ENE": 35190, - "ĠUni": 35191, - "odles": 35192, - "Ġzeros": 35193, - "Ġdelightful": 35194, - "jm": 35195, - "Ġdopo": 35196, - "没äºĭ": 35197, - "Ġpositivity": 35198, - "ĠVISTA": 35199, - "ĠResource": 35200, - "íĥĢë": 35201, - "ÑĪие": 35202, - "Carl": 35203, - "Ġpiping": 35204, - "Ġchopping": 35205, - "ĠGanze": 35206, - "üss": 35207, - "ĠAo": 35208, - "Ġshattered": 35209, - "ĠDetective": 35210, - "Ġundoubtedly": 35211, - "Ġhalluc": 35212, - "Ġench": 35213, - "ÑĭÑĩно": 35214, - "ÑĥлÑıÑĢ": 35215, - "isesti": 35216, - "Ġpedals": 35217, - "Ġdurum": 35218, - "¤íĶ": 35219, - "laimer": 35220, - "Ġpropre": 35221, - "Cu": 35222, - "Ġtranslator": 35223, - "ĠcaÅĤ": 35224, - "Ġ그걸": 35225, - "ĠcaÅĤy": 35226, - "UA": 35227, - "Ġrevised": 35228, - "Ġподоб": 35229, - "ĠArticle": 35230, - "ĠHaiti": 35231, - "ĠÃĵ": 35232, - "ĠCtrl": 35233, - "Ġrozm": 35234, - "lait": 35235, - "Ġletzte": 35236, - "ispering": 35237, - "display": 35238, - "Ġaluminium": 35239, - "Ġpalabras": 35240, - "Ġconocer": 35241, - "Ġzitten": 35242, - "Ġdirig": 35243, - "åıªæľī": 35244, - "Ġbrainstorm": 35245, - "Ġwifi": 35246, - 
"ĠParticip": 35247, - "Ġviewpoint": 35248, - "ĠQuan": 35249, - "Ġhierarch": 35250, - "Welcome": 35251, - "対": 35252, - "Ġoffen": 35253, - "ĠRecovery": 35254, - "gano": 35255, - "Would": 35256, - "Ġrepro": 35257, - "Ġperceptions": 35258, - "Ġdemasi": 35259, - "ĠBangladesh": 35260, - "ĠIncredible": 35261, - "Ġletzt": 35262, - "Ġbehaving": 35263, - "Ġastonishing": 35264, - "ĠâĨ": 35265, - "ĠëĤ¨ìŀIJ": 35266, - "èµ°äºĨ": 35267, - "ãĥĶ": 35268, - "ĠGORDON": 35269, - "CAR": 35270, - "?!\"": 35271, - "ĠPrest": 35272, - "Ġë§ŀìķĦìļĶ": 35273, - "Ġtand": 35274, - "Ġlash": 35275, - "çĬ": 35276, - "ificant": 35277, - "Ġintoler": 35278, - "ĠгеÑĢо": 35279, - "Ġteu": 35280, - "aso": 35281, - "ĠÑģовеÑĤ": 35282, - "Ġtravelers": 35283, - "ĠSynd": 35284, - "ĠвеÑĢÑģ": 35285, - "Fonda": 35286, - "adı": 35287, - "Ġtranscription": 35288, - "Ġtitanium": 35289, - "Ġtwists": 35290, - "Ġgearbox": 35291, - "ensation": 35292, - "fat": 35293, - "Coll": 35294, - "ĠCommonwealth": 35295, - "zon": 35296, - "ĠPolizei": 35297, - "ĠAPPLAUSE": 35298, - "fry": 35299, - "ĠJuda": 35300, - "esteem": 35301, - "Ġsock": 35302, - "ĠJugend": 35303, - "ĠкÑģÑĤаÑĤи": 35304, - "ĠDro": 35305, - "Ġprochaine": 35306, - "ãĥ¼ãĥ«": 35307, - "Ġliksom": 35308, - "ĠEnergie": 35309, - "ĠMarina": 35310, - "Ġ230": 35311, - "Ġê°ĢìĦľ": 35312, - "umping": 35313, - "Ġlone": 35314, - "ç´ļ": 35315, - "Ġfonts": 35316, - "Ġbusinessman": 35317, - "Ġply": 35318, - "Ġdoe": 35319, - "grid": 35320, - "ĠMilwaukee": 35321, - "ĠEden": 35322, - "!\".": 35323, - "ĠÛĮÛģ": 35324, - "ogens": 35325, - "Ġteaser": 35326, - "Ġquién": 35327, - "Ġincentiv": 35328, - "govern": 35329, - "Ġchildcare": 35330, - "Ġsneakers": 35331, - "Ġimprisoned": 35332, - "®": 35333, - "иÑĤеÑģÑĮ": 35334, - "anbul": 35335, - "Ġregain": 35336, - "Ġtranquil": 35337, - "Redner": 35338, - "鼨": 35339, - "IFA": 35340, - "Ġideological": 35341, - "ĠmayorÃŃa": 35342, - "Ġbureau": 35343, - "eterm": 35344, - "ĠDID": 35345, - "ìĬ·": 35346, - "Ġwaving": 35347, - "Ġbeb": 35348, - "Ġár": 35349, - "Ġкв": 35350, - "Ġenvoy": 35351, - "anut": 35352, - "икÑĥ": 35353, - "ĠEnvironment": 35354, - "ĠAssass": 35355, - "ãĤĵãģ§": 35356, - "ĠBread": 35357, - "ĠТÑĥÑĤ": 35358, - "Ġstaircase": 35359, - "ĠDisease": 35360, - "Ġaucun": 35361, - "ĠëĭĪ": 35362, - "Ġconfrontation": 35363, - "Ġ1941": 35364, - "Ġirony": 35365, - "Ġworsh": 35366, - "ãĤĮãĤĭ": 35367, - "Ġfick": 35368, - "ĠNaomi": 35369, - "Ġbackside": 35370, - "ieux": 35371, - "Kap": 35372, - "Ġvedere": 35373, - "Ġlengthy": 35374, - "Ġbreaker": 35375, - "ĠRolle": 35376, - "Ġpredator": 35377, - "Ġnossos": 35378, - "Ġadvertise": 35379, - "è³ĩ": 35380, - "ÑĢоде": 35381, - "Rednerwechsel": 35382, - "reten": 35383, - "Ġcollectors": 35384, - "ıģımız": 35385, - "Ġtrig": 35386, - "Ġaxes": 35387, - "inters": 35388, - "Ġpenalties": 35389, - "ĠOsman": 35390, - "ĠJenna": 35391, - "Ġflakes": 35392, - "Ġtrainers": 35393, - "Ġstunned": 35394, - "ĠScroll": 35395, - "ĠPip": 35396, - "ĠнаÑģÑĤ": 35397, - "ĠnhÃł": 35398, - "ĠSmack": 35399, - "ẫn": 35400, - "ratos": 35401, - "ĠÑĢабоÑĤÑĭ": 35402, - "Ġucz": 35403, - "ĠLemon": 35404, - "ĠSind": 35405, - "Ġpsychic": 35406, - "ĠAbg": 35407, - "Ġmammals": 35408, - "Ġimmersive": 35409, - "Ġbots": 35410, - "Ġverschiedene": 35411, - "Ġgeral": 35412, - "Ġfollower": 35413, - "Ġä»ĸ": 35414, - "Ġseguridad": 35415, - "Ġimmersed": 35416, - "feito": 35417, - "cross": 35418, - "Ġöld": 35419, - "íĥĦ": 35420, - "Ġãģĵãģ®": 35421, - "Ġ×Ķ×Ļ×IJ": 35422, - "ĠJian": 35423, - "Ġbiliyor": 35424, - "area": 35425, - "Ġkaf": 35426, - "Ġgodt": 35427, - "çĽ¸ä¿¡": 35428, - 
"Ġë°©ìĨ¡": 35429, - "Ġdetriment": 35430, - "æ¥ļ": 35431, - "Ñĸл": 35432, - "ĠÄijâu": 35433, - "Ġchloride": 35434, - "øre": 35435, - "lei": 35436, - "Ġmonte": 35437, - "Ġdifférentes": 35438, - "à¯ģ.": 35439, - "Ġcaregivers": 35440, - "Ġinadequ": 35441, - "Ġfarewell": 35442, - "ĠÑĤипа": 35443, - "ontec": 35444, - "ĠEph": 35445, - "HHH": 35446, - "ĠTodos": 35447, - "ĠСШÐIJ": 35448, - "Ġtrov": 35449, - "Ġlige": 35450, - "Ġcông": 35451, - "ĠCiv": 35452, - "Ġcapaz": 35453, - "ĠVallahi": 35454, - "Ġqueste": 35455, - "Ġreplica": 35456, - "سب": 35457, - "zna": 35458, - "ĠÑģлÑĥж": 35459, - "ĠPT": 35460, - "wave": 35461, - "ieni": 35462, - "Ġrelied": 35463, - "develop": 35464, - "Ġdeme": 35465, - "ĠAman": 35466, - "Ġ[...]": 35467, - "Ġcompliments": 35468, - "uais": 35469, - "ĠíĮ¨": 35470, - "Ġsmelling": 35471, - "Ġdadurch": 35472, - "ÙĪت": 35473, - "Ġoranges": 35474, - "Ġлай": 35475, - "Ġstabilization": 35476, - "åĢį": 35477, - "ãĤĮãģŁ": 35478, - "楽": 35479, - "Ġappliances": 35480, - "Ġhm": 35481, - "ĥIJë©´": 35482, - "odynamics": 35483, - "ĠciÄĻ": 35484, - "ĠCott": 35485, - "MON": 35486, - "ĠMang": 35487, - "æĶ¯æĮģ": 35488, - "Ġallerdings": 35489, - "ική": 35490, - "shots": 35491, - "Ġts": 35492, - "ĠGör": 35493, - "ĠCHAR": 35494, - "Ġ:(": 35495, - "Ġwrath": 35496, - "Ġfique": 35497, - "Ġführen": 35498, - "Ġtestament": 35499, - "Ġ^^": 35500, - "á¹Ľá¹£á¹ĩa": 35501, - "ALD": 35502, - "Ġtexto": 35503, - "ĠDogs": 35504, - "Ġsib": 35505, - "Ġpathetic": 35506, - "ocks": 35507, - "Ġradically": 35508, - "ĠMORE": 35509, - "ĠJAMES": 35510, - "Ġingl": 35511, - "ĠTechnical": 35512, - "Ġporch": 35513, - "ĠUT": 35514, - "ĠобÑıзаÑĤелÑĮно": 35515, - "Ġrenewal": 35516, - "Ġaesthetics": 35517, - "ikum": 35518, - "Ġbeverage": 35519, - "dern": 35520, - "Ġpredictive": 35521, - "Ġchuy": 35522, - "ĠRegarding": 35523, - "ĠForward": 35524, - "ĠÙĪÙĦ": 35525, - "Ġcontextual": 35526, - "Ġdwarf": 35527, - "Ġprehe": 35528, - "Ġgoverned": 35529, - "ħĦ": 35530, - "Ġtrabalhar": 35531, - "Ġnegócio": 35532, - "ĠболÑĮÑĪой": 35533, - "еÑĩаÑĤ": 35534, - "ĠдÑĥÑħ": 35535, - "Ġfloods": 35536, - "Ġbowling": 35537, - "ĠOB": 35538, - "ĠHär": 35539, - "Ġgrading": 35540, - "주ëĬĶ": 35541, - "Ġgars": 35542, - "dling": 35543, - "Ġrak": 35544, - "ëĪ": 35545, - "creat": 35546, - "ĠÑīе": 35547, - "Ġneighbours": 35548, - "food": 35549, - "Query": 35550, - "Ġheroin": 35551, - "iceps": 35552, - "ĠKinda": 35553, - "NET": 35554, - "Ġmari": 35555, - "Ġimitate": 35556, - "Ġachter": 35557, - "Ġsettlements": 35558, - "rare": 35559, - "cciones": 35560, - "Ġëĵľ": 35561, - "Ġfik": 35562, - "itung": 35563, - "ĠмакÑģим": 35564, - "Ġelf": 35565, - "Ġdalla": 35566, - "ĠPolsce": 35567, - "ĠPul": 35568, - "ЧÑĤо": 35569, - "ĠMorgen": 35570, - "ØŃÙħ": 35571, - "Ġsupremacy": 35572, - "Ġkys": 35573, - "ĠHurricane": 35574, - "ĠGTA": 35575, - "ĠFeh": 35576, - "Ġfinalmente": 35577, - "mund": 35578, - "ĠKrie": 35579, - "époque": 35580, - "ĠTucker": 35581, - "ITT": 35582, - "Ġlur": 35583, - "Ġdipping": 35584, - "äv": 35585, - "Ġeerste": 35586, - "ĠFlint": 35587, - "bildung": 35588, - "ูà¹ī": 35589, - "Ġtoim": 35590, - "Ġpracy": 35591, - "Ġtransforms": 35592, - "Ġspeeding": 35593, - "Ġpresenter": 35594, - "Ġfellows": 35595, - "filled": 35596, - "ieza": 35597, - "Ġadvising": 35598, - "ĠInterview": 35599, - "игÑĢ": 35600, - "wehr": 35601, - "ĠDante": 35602, - "pture": 35603, - "Ī문": 35604, - "¯¸ë": 35605, - "IJIJ": 35606, - "ĠCounter": 35607, - "Ġcrist": 35608, - "Ġì§ľ": 35609, - "Ġjeune": 35610, - "ĠÑģÑĤÑĢаÑĪ": 35611, - "ĠmieÄĩ": 35612, - "Ġtutor": 35613, - "Ġmasala": 35614, - 
"Ġpowdered": 35615, - "Ġnau": 35616, - "ĠFrederick": 35617, - "Ġbilling": 35618, - "ĠEisen": 35619, - "ĠдобÑĢ": 35620, - "Ġmest": 35621, - "æ½": 35622, - "Ġsnipp": 35623, - "Ġmono": 35624, - "ĠAlo": 35625, - "ĠMercy": 35626, - "érience": 35627, - "Ġcasualties": 35628, - "ĠANNOUNCER": 35629, - "ä»İ": 35630, - "Ġtocar": 35631, - "Ġbacterial": 35632, - "Ho": 35633, - "Ġstreak": 35634, - "ĠJENN": 35635, - "Ġplast": 35636, - "Ñģлед": 35637, - "Ġreapp": 35638, - "Ġpaycheck": 35639, - "Ġminers": 35640, - "habt": 35641, - "ĠJap": 35642, - "нÑĥÑĤ": 35643, - "Ġredemption": 35644, - "Ġquir": 35645, - "hnlich": 35646, - "Ġaccumulation": 35647, - "Ġshove": 35648, - "Ġadrenaline": 35649, - "Make": 35650, - "ĠHern": 35651, - "ossing": 35652, - "ĠVil": 35653, - "ubby": 35654, - "hertz": 35655, - "breaks": 35656, - "Ġspur": 35657, - "ĠDaha": 35658, - "USTIN": 35659, - "Ġcontinuer": 35660, - "ĠSaul": 35661, - "ãģ®ãģ¯": 35662, - "ĠíıŃ": 35663, - "ĠëIJĺë©´": 35664, - "Ġë§IJìĶĢ": 35665, - "Ġож": 35666, - "Ġsuspects": 35667, - "Ġlaquelle": 35668, - "ĠMuchas": 35669, - "Ġvöllig": 35670, - "ulen": 35671, - "Ġimpres": 35672, - "Ġlobb": 35673, - "enee": 35674, - "Ġнаж": 35675, - "Ta": 35676, - "Ġréalité": 35677, - "ĠRex": 35678, - "Ġharvesting": 35679, - "Ġestr": 35680, - "æ¶": 35681, - "ospace": 35682, - "OSS": 35683, - "Ġdisturbance": 35684, - "assic": 35685, - "ĠIsab": 35686, - "Ġdécouv": 35687, - "ĠHampshire": 35688, - "Ġornament": 35689, - "Ġluôn": 35690, - "ĠUW": 35691, - "ĠjÄħ": 35692, - "éĤ£ä¹Ī": 35693, - "Ġrespecto": 35694, - "Ġcomunidad": 35695, - "Ġcomigo": 35696, - "agna": 35697, - "Ġintrinsic": 35698, - "ĠAlumni": 35699, - "Ġsesleri": 35700, - "Ġestimation": 35701, - "âĢĶâĢĶ": 35702, - "Ġproduit": 35703, - "ãĢĤãĢį": 35704, - "ĠвÑĢ": 35705, - "Ġwhirl": 35706, - "Ġacces": 35707, - "çu": 35708, - "Ġvariability": 35709, - "Ġvodka": 35710, - "itsu": 35711, - "Ġinternships": 35712, - "Ġallocate": 35713, - "RR": 35714, - "íĽĪ": 35715, - "Ġinstructional": 35716, - "tant": 35717, - "Ġà®ħத": 35718, - "Ġinvites": 35719, - "Ġhak": 35720, - "Ġscares": 35721, - "Ġeclipse": 35722, - "пов": 35723, - "колÑĮ": 35724, - "ativas": 35725, - "Ġstabbed": 35726, - "ĠDOM": 35727, - "ä¸įåĪ°": 35728, - "roots": 35729, - "ĠPicture": 35730, - "íĺ¼": 35731, - "ĠCHA": 35732, - "iec": 35733, - "ıı": 35734, - "hanol": 35735, - "Ġmisunderstand": 35736, - "Ray": 35737, - "Ġroadmap": 35738, - "ocumented": 35739, - "izione": 35740, - "ĠOlive": 35741, - "rift": 35742, - "Ġ×Ķ׳": 35743, - "æ¯į": 35744, - "lest": 35745, - ";;": 35746, - "ĠEA": 35747, - "éľĢè¦ģ": 35748, - "одÑĥ": 35749, - "Ġhobbies": 35750, - "Ġburial": 35751, - "ãģ«ãģ¡ãģ¯": 35752, - "Ф": 35753, - "lege": 35754, - "ĠHJ": 35755, - "Ġobjection": 35756, - "ĠãģŃ": 35757, - "ctory": 35758, - "Ġincremental": 35759, - "Ġgymn": 35760, - "Ġepidemi": 35761, - "ÑģÑĭл": 35762, - "Ãij": 35763, - "Ġadvancement": 35764, - "Ġparch": 35765, - "News": 35766, - "Ġayr": 35767, - "лам": 35768, - "Ġ׾ש": 35769, - "Ġdiploma": 35770, - "ãģ¡ãĤĥãĤĵ": 35771, - "Ġrobbed": 35772, - "Only": 35773, - "Ġincur": 35774, - "Ġchanting": 35775, - "Ġíķ´ëıĦ": 35776, - "Ġriches": 35777, - "ĠCarmen": 35778, - "Ġnostro": 35779, - "λÎŃ": 35780, - "ĠPowder": 35781, - "à¹Ģห": 35782, - "ĠìŀĪìľ¼ë©´": 35783, - "Ġgerçekten": 35784, - "ĠPikachu": 35785, - "емон": 35786, - "OLL": 35787, - "Ġplanetary": 35788, - "Ġslows": 35789, - "Ġclockwise": 35790, - "alion": 35791, - "ĠìĮ": 35792, - "Ġvern": 35793, - "Ġhomme": 35794, - "Ġendpoint": 35795, - "Ġinnocence": 35796, - "Ġelementos": 35797, - "Ġsophomore": 35798, - "Ġnotions": 
35799, - "ĠCouldn": 35800, - "pur": 35801, - "Ġzat": 35802, - "Ġobsess": 35803, - "Ġmotivo": 35804, - "ĠKub": 35805, - "ĠDrug": 35806, - "Ant": 35807, - "ĠPlayers": 35808, - "ĠHumans": 35809, - "Ġmelee": 35810, - "ĠWildlife": 35811, - "ĠVP": 35812, - "Ġvolcanic": 35813, - "Ġcomin": 35814, - "ĠGuang": 35815, - "ĠÏĦιÏĤ": 35816, - "ĠоÑģобенно": 35817, - "ĠSize": 35818, - "Listen": 35819, - "ĠAaa": 35820, - "appro": 35821, - "Ġbarbar": 35822, - "ĠParkinson": 35823, - "нÑıÑĤÑĮ": 35824, - "åį°": 35825, - "Ġunderestimate": 35826, - "Ġsubstitution": 35827, - "Ġcosmetic": 35828, - "ä¸ĭ次": 35829, - "Ġwillen": 35830, - "Ġbeide": 35831, - "anni": 35832, - "Ġconditioned": 35833, - "ĠDebbie": 35834, - "Ġisto": 35835, - "ĠEdwards": 35836, - "ìĽĮìļĶ": 35837, - "ĠÑĤов": 35838, - "Ġabbrevi": 35839, - "ĠMün": 35840, - "ĠPrinc": 35841, - "ĠLiang": 35842, - "Ġstink": 35843, - "Ġradioactive": 35844, - "ãģĨãĤı": 35845, - "Ġacontec": 35846, - "Ġuncon": 35847, - "ĠTurbo": 35848, - "ãģIJ": 35849, - "Ġkisses": 35850, - "æĺ¯ä»Ģ麼": 35851, - "еÑĤÑĢов": 35852, - "Ġfrontier": 35853, - "ĠSpy": 35854, - "ĠBelarus": 35855, - "ĠCBS": 35856, - "á»Ĺ": 35857, - "amoto": 35858, - "íķľëį°": 35859, - "ĠÑģÑĤÑĢо": 35860, - "ĠEnfin": 35861, - "Ġbreadth": 35862, - "éĺ²": 35863, - "ĠCafe": 35864, - "ĠDafür": 35865, - "ĠBour": 35866, - "aras": 35867, - "Ġblueprint": 35868, - "anı": 35869, - "Ġconstants": 35870, - "Ġattacker": 35871, - "ĠFormula": 35872, - "zaÄĩ": 35873, - "Ġsowie": 35874, - "Ġeyebrow": 35875, - "obook": 35876, - "Ġsetzen": 35877, - "第ä¸ī": 35878, - "onsider": 35879, - "awning": 35880, - "Ġsöyleye": 35881, - "Ġinvaded": 35882, - "Ġpronouns": 35883, - "Ġdobry": 35884, - "Si": 35885, - "ĠХоÑĤ": 35886, - "Ġvolleyball": 35887, - "Ġlament": 35888, - "isches": 35889, - "arme": 35890, - "api": 35891, - "ĠWiki": 35892, - "лиÑĪ": 35893, - "Ġkasih": 35894, - "Ġpess": 35895, - "ĠÑĦоÑĤ": 35896, - "ĠSul": 35897, - "å¾·": 35898, - "Ġpseudo": 35899, - "Ġmemo": 35900, - "ĠìĹ°ìĬµ": 35901, - "ĠдоллаÑĢов": 35902, - "ĠпеÑĢем": 35903, - "ĠReach": 35904, - "miral": 35905, - "alted": 35906, - "Ġstatut": 35907, - "reading": 35908, - "Ġsöyled": 35909, - "ĠLindsey": 35910, - "ĠAhmad": 35911, - "ë¶Ģë": 35912, - "ĠСегоднÑı": 35913, - "Ġprzygot": 35914, - "Ġhyster": 35915, - "URE": 35916, - "ĠNeigh": 35917, - "Reporter": 35918, - "ĠBunu": 35919, - "ĠTreaty": 35920, - "ĠRank": 35921, - "ĠFame": 35922, - "inished": 35923, - "Ġgeared": 35924, - "Ġcompose": 35925, - "odia": 35926, - "ĠLon": 35927, - "ĠjesteÅĽmy": 35928, - "ĠDIRECTOR": 35929, - "Ġelkaar": 35930, - "ĠViel": 35931, - "×IJש": 35932, - "ynthia": 35933, - "並": 35934, - "Ġmère": 35935, - "ĠTomato": 35936, - "Ġexatamente": 35937, - "niÄĻ": 35938, - "ĠFrei": 35939, - "ĠDif": 35940, - "Ġopenings": 35941, - "Ġgraphical": 35942, - "ĠÑĥдоб": 35943, - "ĠвÑģп": 35944, - "ĠWeekly": 35945, - "ева": 35946, - "Ġhangs": 35947, - "Ġunsafe": 35948, - "Ġemblem": 35949, - "ĠKolleginnen": 35950, - "alay": 35951, - "Ġksi": 35952, - "Ġhides": 35953, - "Ġolmay": 35954, - "Ġentste": 35955, - "Ġarthritis": 35956, - "ÃŁerdem": 35957, - "Ġbinnen": 35958, - "Ġlistens": 35959, - "ĠHess": 35960, - "åĨįä¾Ĩ": 35961, - "ĠLouise": 35962, - "lden": 35963, - "енÑģ": 35964, - "ĠVersion": 35965, - "ĠAgriculture": 35966, - "ìĬ¤ë¥¼": 35967, - "ман": 35968, - "ëĦ¤ìļĶ": 35969, - "Ġwines": 35970, - "ĠINF": 35971, - "rul": 35972, - "ĠJK": 35973, - "ıyorlar": 35974, - "shield": 35975, - "reath": 35976, - "Ġterus": 35977, - "ĠLum": 35978, - "Ġanticipation": 35979, - "Ġaccustomed": 35980, - "ĠMina": 35981, - "Ġwield": 35982, - "ioè": 
35983, - "mera": 35984, - "Ġcountdown": 35985, - "Ġcling": 35986, - "Ġcommend": 35987, - "Ġfaktiskt": 35988, - "Ġdefenses": 35989, - "Ġcockpit": 35990, - "Ġкоманд": 35991, - "Ġdishwas": 35992, - "ĠThanos": 35993, - "Ġkidneys": 35994, - "Ġsehe": 35995, - "Ġmicrobes": 35996, - "Ġcuff": 35997, - "ĠвÑĭÑģок": 35998, - "ĠSpicy": 35999, - "çŃīçŃī": 36000, - "வர": 36001, - "culus": 36002, - "orc": 36003, - "ç¾ħ": 36004, - "ixes": 36005, - "ĠCredit": 36006, - "Ġraj": 36007, - "Ġbringt": 36008, - "ĠNiss": 36009, - "Ġgrim": 36010, - "ĠSOL": 36011, - "Ġtenim": 36012, - "ĠSudan": 36013, - "ĠSpart": 36014, - "Ġpromotes": 36015, - "ĠNossa": 36016, - "ĠÑģоÑģÑĤоÑıни": 36017, - "Ġì°©": 36018, - "Ġuncont": 36019, - "ĠLiberal": 36020, - "ĠТолÑĮко": 36021, - "ĠViele": 36022, - "Ġktórej": 36023, - "Ġ****": 36024, - "Max": 36025, - "ĠЧÑĤобÑĭ": 36026, - "350": 36027, - "Ġíĺ¼ìŀIJ": 36028, - "Ġë¶Ħëĵ¤ìĿ´": 36029, - "Ġwarp": 36030, - "Ġtenga": 36031, - "Ġsympathetic": 36032, - "Ġbizi": 36033, - "ĠZack": 36034, - "iedo": 36035, - "Ġëī´ì": 36036, - "piel": 36037, - "ĠÑĤол": 36038, - "Ġscaled": 36039, - "ĠPETER": 36040, - "ĠCOMM": 36041, - "ĠCame": 36042, - "Ġcatastrophe": 36043, - "Ġsweaty": 36044, - "igration": 36045, - "Ġstuffing": 36046, - "ĠÏĢολÏį": 36047, - "ĠDriver": 36048, - "zyst": 36049, - "Tech": 36050, - "Ġassessed": 36051, - "ĠSurface": 36052, - "ırım": 36053, - "sur": 36054, - "lerweile": 36055, - "Ġдог": 36056, - "Ġshutting": 36057, - "Ġfractions": 36058, - "ĠÑģол": 36059, - "everyone": 36060, - "Ġern": 36061, - "ĠÐĿов": 36062, - "Ġdefenders": 36063, - "Ġversucht": 36064, - "ãĥ³ãĥĢ": 36065, - "Ġpolity": 36066, - "ĠÐŁÐ¾Ð½": 36067, - "verständ": 36068, - "Ġbrowsers": 36069, - "Ġtransformative": 36070, - "Ġdictate": 36071, - "ĠLEGO": 36072, - "Ġninguna": 36073, - "ê´ij": 36074, - "Ġpizz": 36075, - "ĠHarold": 36076, - "ĠLopez": 36077, - "Ú¾ÛĮ": 36078, - "anız": 36079, - "atchet": 36080, - "ÙĬت": 36081, - "Ġlernen": 36082, - "Ġê·ĢìŬ": 36083, - "Ġhoused": 36084, - "Ġcleanse": 36085, - "ĠWAT": 36086, - "laration": 36087, - "Ġbytes": 36088, - "Ġtucked": 36089, - "Ġfaults": 36090, - "до": 36091, - "FX": 36092, - "Ġìĸ¼ë§ĪëĤĺ": 36093, - "Ġdeform": 36094, - "Ġcontracting": 36095, - "ĠTIME": 36096, - "irse": 36097, - "Ġneben": 36098, - "Ġcerc": 36099, - "ĠArmstrong": 36100, - "Ġtester": 36101, - "Ġparfait": 36102, - "Ġjealousy": 36103, - "Ġtoxins": 36104, - "Ġdisbel": 36105, - "ÑĥÑĢÑĭ": 36106, - "impression": 36107, - "Ġprostate": 36108, - "Ġfirewall": 36109, - "Ġclassics": 36110, - "еÑĩÑĮ": 36111, - "Ġsocialism": 36112, - "Ġgracious": 36113, - "ĠÑģнова": 36114, - "ĠднÑı": 36115, - "Ġburner": 36116, - "ĠMinor": 36117, - "Ġìļ°ë¦¬ë": 36118, - "Ġjedes": 36119, - "Ġcontinuum": 36120, - "Ġhots": 36121, - "Ġoccurrence": 36122, - "Ġadministered": 36123, - "ĠзамеÑĤ": 36124, - "Ġhesitation": 36125, - "Ġdrills": 36126, - "erca": 36127, - "ĠвÑĤоÑĢой": 36128, - "Ġsteadily": 36129, - "Ġinsanlar": 36130, - "Ġihan": 36131, - "íij": 36132, - "Ġhelper": 36133, - "ĠSenin": 36134, - "åģľ": 36135, - "ование": 36136, - "ĠERIC": 36137, - "bla": 36138, - "ĠAcademic": 36139, - "Ġhumanities": 36140, - "black": 36141, - "umpy": 36142, - "ortex": 36143, - "ĠìłĪë": 36144, - "ĠØ¥ÙĨ": 36145, - "Ġdisclose": 36146, - "ĠElijah": 36147, - "ĠλÎŃ": 36148, - "ĠQuer": 36149, - "بÙĦ": 36150, - "ãĤ¡": 36151, - "Tell": 36152, - "arle": 36153, - "ÑĸÑĢ": 36154, - "Ġaugmented": 36155, - "Ġë¹ĦìĬ·": 36156, - "Ġandroid": 36157, - "त": 36158, - "arma": 36159, - "Ġszer": 36160, - "geord": 36161, - "Ġgeek": 36162, - "Ġyeux": 36163, - "Ġpong": 36164, - "ĠãģĿãģĨ": 
36165, - "Ġtortured": 36166, - "ĠBath": 36167, - "zig": 36168, - "asonable": 36169, - "Ġnets": 36170, - "Ġbaru": 36171, - "ĠFlat": 36172, - "ĠVater": 36173, - "ĠTerror": 36174, - "ĠAvo": 36175, - "Ġceremonies": 36176, - "roe": 36177, - "Ùģس": 36178, - "Ops": 36179, - "Ġhyvin": 36180, - "Ġapresent": 36181, - "olor": 36182, - "ĠигÑĢÑĭ": 36183, - "orton": 36184, - "Ġê·¸ëŀ¬": 36185, - "Ġlookin": 36186, - "ĠTY": 36187, - "ĠMint": 36188, - "Add": 36189, - "Ġmite": 36190, - "ĠSmoke": 36191, - "Ġnota": 36192, - "Ġmoss": 36193, - "ĠAbend": 36194, - "Ġ컨": 36195, - "Ġexaggerated": 36196, - "fires": 36197, - "Ġredist": 36198, - "ffiti": 36199, - "Ġopenness": 36200, - "ê°IJìĿ´": 36201, - "endeu": 36202, - "енной": 36203, - "Watch": 36204, - "Ġavatar": 36205, - "ĠPey": 36206, - "urun": 36207, - "Ġsenza": 36208, - "Ġì§ĢìĹŃ": 36209, - "ĠNatomiast": 36210, - "Ġemergence": 36211, - "rays": 36212, - "Ġcrafted": 36213, - "gary": 36214, - "ãģłãģij": 36215, - "üng": 36216, - "-\"": 36217, - "Ġhacked": 36218, - "Ġstray": 36219, - "encie": 36220, - "emo": 36221, - "Ġcomen": 36222, - "ĠKız": 36223, - "ĠJasmine": 36224, - "ĠHindi": 36225, - "manas": 36226, - "Ġinfinitely": 36227, - "emon": 36228, - "ìĿ¸ëį°ìļĶ": 36229, - "jak": 36230, - "Ġroaring": 36231, - "érique": 36232, - "sweise": 36233, - "ĠRolex": 36234, - "åł±å°İ": 36235, - "ĠStuart": 36236, - "bnb": 36237, - "Ġdiagnose": 36238, - "Ġcoherent": 36239, - "ĠMJ": 36240, - "æºĸåĤĻ": 36241, - "Ġpike": 36242, - "lav": 36243, - "Ġorchestral": 36244, - "аÑģÑĤи": 36245, - "Ġterminar": 36246, - "Ġgatherings": 36247, - "Ġcompliant": 36248, - "Ġupgrading": 36249, - "Ġregulator": 36250, - "Ġlanç": 36251, - "éĢ£": 36252, - "Ġmerchants": 36253, - "tawa": 36254, - "Ġmonitored": 36255, - "Ġrendre": 36256, - "两": 36257, - "Ġunterwegs": 36258, - "anguard": 36259, - "gard": 36260, - "ĠBelow": 36261, - "duino": 36262, - "ĠЦе": 36263, - "Ġimpedance": 36264, - "ìľ¡": 36265, - "份": 36266, - "Ġaktuell": 36267, - "ĠVatic": 36268, - "åŃ©": 36269, - "Ġstewards": 36270, - "Ġbrightest": 36271, - "Ġkenn": 36272, - "Ġkau": 36273, - "ĠMatrix": 36274, - "ĠBark": 36275, - "ĠðŁij": 36276, - "Ġtaper": 36277, - "Ġcasino": 36278, - "ר×Ķ": 36279, - "ysical": 36280, - "Ġbuilders": 36281, - "ĠczÅĤowie": 36282, - "ĠNepal": 36283, - "Ġ!\"": 36284, - "Ġterme": 36285, - "Ġinnych": 36286, - "Ġmaths": 36287, - "Ġdrafted": 36288, - "ĠBalk": 36289, - "Ġhesitant": 36290, - "Ġvoltar": 36291, - "Ġrevive": 36292, - "ĠÑĦилÑĮма": 36293, - "Ġassassin": 36294, - "ĠSolutions": 36295, - "Ġduel": 36296, - "Ġbearings": 36297, - "à¸Ħะ": 36298, - "Ġrookie": 36299, - "ikat": 36300, - "Ġbiscuits": 36301, - "Ġcords": 36302, - "ÑĥваÑĤи": 36303, - "ARIN": 36304, - "Ġprogressing": 36305, - "ĠGir": 36306, - "Ġpenetrate": 36307, - "ĠStorage": 36308, - "eight": 36309, - "ĠÑĤÑĢÑĥ": 36310, - "ĠdonÃŃt": 36311, - "Ġsizin": 36312, - "Ġoutdated": 36313, - "ĠнаÑĪи": 36314, - "Ġaffir": 36315, - "Ġspoons": 36316, - "Ġoni": 36317, - "Ġflank": 36318, - "ĠGol": 36319, - "hã": 36320, - "Ġpéri": 36321, - "Ġhonorable": 36322, - "ĠBreathe": 36323, - "scenes": 36324, - "Ġobviamente": 36325, - "икÑģ": 36326, - "Ġש×ŀ×": 36327, - "Ġsmoothie": 36328, - "ŀĪë": 36329, - "Ġdime": 36330, - "ĠíĸĪìĸ´ìļĶ": 36331, - "Ġappel": 36332, - "ĠCatholics": 36333, - "Ġsingles": 36334, - "Ġlaten": 36335, - "Ġçünkü": 36336, - "ĠVader": 36337, - "æıĽ": 36338, - "Ġvardı": 36339, - "ĠIstanbul": 36340, - "gré": 36341, - "ĠElsa": 36342, - "ël": 36343, - "Ġinvece": 36344, - "Ġcrane": 36345, - "Ġobe": 36346, - "ĠShark": 36347, - "Ġsmack": 36348, - "Ġrestoring": 36349, - ".\\": 
36350, - "Ġë¹łë": 36351, - "Ġfaded": 36352, - "umbers": 36353, - "Singing": 36354, - "Ġdepressing": 36355, - "thest": 36356, - "ĠWahr": 36357, - "Ġmultitude": 36358, - "ÑĢавÑģÑĤвÑĥйÑĤе": 36359, - "rijk": 36360, - "eka": 36361, - "Ġcompletes": 36362, - "ĠWells": 36363, - "Ġroy": 36364, - "ĠPray": 36365, - "ĠKalau": 36366, - "izin": 36367, - "iaÅĤem": 36368, - "Ġlocom": 36369, - "ĠNashville": 36370, - "ĠPentagon": 36371, - "미": 36372, - "ĠNEW": 36373, - "ÄħÄĩ": 36374, - "ÃŃss": 36375, - "Ġmarrying": 36376, - "Ġfeud": 36377, - "íĻķ": 36378, - "æĢ¥": 36379, - ")!": 36380, - "ĠOperations": 36381, - "ÑĥÑĶ": 36382, - "Ġmoje": 36383, - "Ġinstructed": 36384, - "ĠëĪĦ구": 36385, - "Ġ×Ķ×Ĵ": 36386, - "ĠпомоÑīÑĮÑİ": 36387, - "Ġsabia": 36388, - "ìķĺìĸ´ìļĶ": 36389, - "plane": 36390, - "pri": 36391, - "ĠполноÑģÑĤÑĮÑİ": 36392, - "ĠKitty": 36393, - "Ġpróprio": 36394, - "edere": 36395, - "Ġinteresante": 36396, - "Ġде": 36397, - "Ġcondensed": 36398, - "Ġavent": 36399, - "TOR": 36400, - "Ġgreasy": 36401, - "ARK": 36402, - "orta": 36403, - "AJ": 36404, - "Ġdisreg": 36405, - "Ġcorrections": 36406, - "Ġstero": 36407, - "Ġinfluenza": 36408, - "Ġdesses": 36409, - "Ġballots": 36410, - "Ġmeget": 36411, - "Ġmafia": 36412, - "Ġböl": 36413, - "nost": 36414, - "ĠÑģÑĤаÑĤÑĮ": 36415, - "Ġresponder": 36416, - "Ġhinten": 36417, - "grav": 36418, - "à¸Ńะ": 36419, - "ynchron": 36420, - "Ġviens": 36421, - "Ġsamo": 36422, - "Ġdt": 36423, - "pannt": 36424, - "ĠÅĽwiat": 36425, - "ĠзапиÑģ": 36426, - "Ġmerged": 36427, - "Ġkep": 36428, - "Ġmisleading": 36429, - "Ġdigamos": 36430, - "Ġammon": 36431, - "è¾Ľ": 36432, - "chet": 36433, - "Ġê°Ģìł¸": 36434, - "Ġuni": 36435, - "ĠëIJĺëĬĶëį°": 36436, - "ĠнапÑĢав": 36437, - "ĠкоÑĤоÑĢого": 36438, - "Ġanimate": 36439, - "×ķ×IJ×": 36440, - "еÑĢв": 36441, - "Ġminced": 36442, - "Ġkaum": 36443, - "ãģĤãģģ": 36444, - "ÏĢε": 36445, - "лег": 36446, - "existing": 36447, - "Ġplataform": 36448, - "ĠKRIS": 36449, - "ìĽł": 36450, - "ĠFamilien": 36451, - "ĠLibya": 36452, - "Ġbiodiversity": 36453, - "Ġidiots": 36454, - "irdi": 36455, - "Ġszyb": 36456, - "ĠRolling": 36457, - "ücht": 36458, - "ĠÑĥдив": 36459, - "ÑģÑĥд": 36460, - "Ġrealizar": 36461, - "Ġcanned": 36462, - "ĠÑĢан": 36463, - "Ġmetabolic": 36464, - "ĠBeef": 36465, - "Ġkilka": 36466, - "лÑİÑģ": 36467, - "Ġregistry": 36468, - "моÑĤÑĢиÑĤе": 36469, - "Ġvielä": 36470, - "Ġodc": 36471, - "Ġcondemned": 36472, - "æ©ĭ": 36473, - "fal": 36474, - "ĠDil": 36475, - "woÅĽci": 36476, - "Aw": 36477, - "Ġstatistically": 36478, - "Ġsogen": 36479, - "ĠBETH": 36480, - "Ġshaving": 36481, - "幸": 36482, - "ocal": 36483, - "ĠFunny": 36484, - "Ġpeacefully": 36485, - "Ġaddictive": 36486, - "ĠInsert": 36487, - "lauf": 36488, - "Ġexperiencia": 36489, - "é¦ĸåħĪ": 36490, - "иÑĤелÑı": 36491, - "ÃŃgen": 36492, - "ágina": 36493, - "Ġabdomen": 36494, - "íķľëĭ¤": 36495, - "icus": 36496, - "imana": 36497, - "ìį¨": 36498, - "arching": 36499, - "Ġkonkret": 36500, - "ìķĺë": 36501, - "ека": 36502, - "oufl": 36503, - "ivel": 36504, - "Ġnude": 36505, - "ètres": 36506, - "Ġmonsieur": 36507, - "Ġclash": 36508, - "Ġtherapists": 36509, - "Ġcubed": 36510, - "Ġretrouver": 36511, - "Ġwaveform": 36512, - "Ġpotem": 36513, - "ĠFormer": 36514, - "isión": 36515, - "åºľ": 36516, - "Ġ×IJ×Ŀ": 36517, - "undos": 36518, - "ĠMeinung": 36519, - "صÙĦ": 36520, - "ĠJude": 36521, - "ĠnÃ¥r": 36522, - "ĠLeonardo": 36523, - "ĠCristo": 36524, - "ĠGOT": 36525, - "ÑģÑĤÑĢÑĥк": 36526, - "LAN": 36527, - "ĠgÃ¥ng": 36528, - "Ġdéb": 36529, - "ĠFrankfurt": 36530, - "Ġcrappy": 36531, - "Ġlil": 36532, - "année": 36533, - "ĠмеÑģÑĤе": 
36534, - "RET": 36535, - "ĠNer": 36536, - "ĠCOSTA": 36537, - "Ġjedem": 36538, - "Ġcurtains": 36539, - "Ġiterations": 36540, - "Ġunav": 36541, - "Ġplaque": 36542, - "orum": 36543, - "Ġζ": 36544, - "Ġnúmeros": 36545, - "Ġdesap": 36546, - "²½": 36547, - "Ġcompiled": 36548, - "Ġrefle": 36549, - "Ġrankings": 36550, - "Ġrepaired": 36551, - "ĠÐĿапÑĢ": 36552, - "Ġdownloads": 36553, - "Ġarmour": 36554, - "Ġ×Ļ×ķתר": 36555, - "Ġlongevity": 36556, - "ĠTONER": 36557, - "ĠкомменÑĤаÑĢ": 36558, - "Ġczego": 36559, - "Ġnotify": 36560, - "Ġairports": 36561, - "Ġenduring": 36562, - "lette": 36563, - "Ġapparat": 36564, - "Ġhabil": 36565, - "á»ĩc": 36566, - "nad": 36567, - "ICO": 36568, - "ĠBrah": 36569, - "Ġsegún": 36570, - "Ġgovernors": 36571, - "kaha": 36572, - "ĠSchluss": 36573, - "Ġodpowied": 36574, - "irting": 36575, - "Ġrempl": 36576, - "ĠAboriginal": 36577, - "identally": 36578, - "Ġenhancing": 36579, - "licting": 36580, - "ĠHawaiian": 36581, - "Ġstriving": 36582, - "ĠNiet": 36583, - "Ġznaczy": 36584, - "Ġobedience": 36585, - "ĠnÃ¥got": 36586, - "Ġexpired": 36587, - "Ġ1918": 36588, - "presented": 36589, - "Ġprowad": 36590, - "ĠTerr": 36591, - "ĠPrinceton": 36592, - "Ġmorgen": 36593, - "Ġattracting": 36594, - "ĠSigma": 36595, - "igner": 36596, - "ĠRechts": 36597, - "ĠPeki": 36598, - "Ġmethy": 36599, - "Ġhamm": 36600, - "Ġdireito": 36601, - "Ġdelegation": 36602, - "иваÑİÑĤ": 36603, - "Ġgin": 36604, - "Young": 36605, - "Ġdependencies": 36606, - "ĠBradley": 36607, - "buds": 36608, - "Ġfis": 36609, - "Ġpytanie": 36610, - "Ġinterconnected": 36611, - "Ġembaixo": 36612, - "ĠSas": 36613, - "Ġruh": 36614, - "ĠSicht": 36615, - "Sur": 36616, - "Ġsuperb": 36617, - "ĠSabbath": 36618, - "ĠDanger": 36619, - "kol": 36620, - "Ġhou": 36621, - "supp": 36622, - "ĠNacional": 36623, - "Ġsuccession": 36624, - "Ġvá": 36625, - "ĠMaÃŁnahmen": 36626, - "ĠJessie": 36627, - "ĠIdaho": 36628, - "forest": 36629, - "ħĺ": 36630, - "Ġ×ŀ×ĵ": 36631, - "ĠØ£ÙĬ": 36632, - "Ġsweetheart": 36633, - "Ġneatly": 36634, - "ĠEvangel": 36635, - "곡": 36636, - "ĠSuite": 36637, - "ública": 36638, - "ĠÑĥли": 36639, - "ĠAnnouncer": 36640, - "ligh": 36641, - "Ġsensations": 36642, - "Ġshelters": 36643, - "Ġhart": 36644, - "Ġsqueezing": 36645, - "ĠRivers": 36646, - "ĠCooking": 36647, - "ì±ħ": 36648, - "personal": 36649, - "Ġmanos": 36650, - "ÑijÑĤÑģÑı": 36651, - "wij": 36652, - "Ġgogg": 36653, - "ĠMilli": 36654, - "ĠFP": 36655, - "ünst": 36656, - "ĠLS": 36657, - "Ġspraying": 36658, - "Ġfaux": 36659, - "Ġautograph": 36660, - "ologic": 36661, - "Ġtorment": 36662, - "Ġencrypted": 36663, - "á»ħ": 36664, - "Ġestre": 36665, - "ç¹¼": 36666, - "à±": 36667, - "Ġstumbled": 36668, - "Ġaider": 36669, - "Ġsaben": 36670, - "xter": 36671, - "ĠCities": 36672, - "ĠTürk": 36673, - "ëĭ¥": 36674, - "chine": 36675, - "Ġtopping": 36676, - "Ġpoisoned": 36677, - "ĠRomania": 36678, - "×ĵ×Ļ": 36679, - "Ģë¡ľ": 36680, - "ĠпоÑĢÑıд": 36681, - "Ġchirping": 36682, - "ĠìĻĦë": 36683, - "×ij×¢": 36684, - "Ġcuanto": 36685, - "Ġdonating": 36686, - "ĠRegent": 36687, - "ĠBeruf": 36688, - "Ġdistracting": 36689, - "Ġstamina": 36690, - "ĠDarren": 36691, - "Ġì¶ķ": 36692, - "lists": 36693, - "dal": 36694, - "chuss": 36695, - "Ġeconomist": 36696, - "ãģĪãĥ¼": 36697, - "orgt": 36698, - "Ġistiyorum": 36699, - "è¿Ľ": 36700, - "ĠSurprise": 36701, - "ĠHao": 36702, - "Ġìµľê³ł": 36703, - "ĠGW": 36704, - "ĠInner": 36705, - "Ġquieren": 36706, - "Ġminded": 36707, - "Ġsupercomputer": 36708, - "Ġdiagrams": 36709, - "íĬľë": 36710, - "ê²łìĸ´": 36711, - "ĠобÑĬÑıÑģ": 36712, - "Ġestaban": 36713, - "Ġdestroys": 36714, - 
"ĠBreaking": 36715, - "ĠkarÄ±ÅŁ": 36716, - "Ġrebuilding": 36717, - "ľëĮĢ": 36718, - "ливо": 36719, - "ĠSauce": 36720, - "ĠFusion": 36721, - "×ķ×ŀ×": 36722, - "ĠQuinn": 36723, - "Ġgauche": 36724, - "ĠÙĪØ£": 36725, - "ĠÈ": 36726, - "çĵľ": 36727, - "Ġtechno": 36728, - "Ġdispatch": 36729, - "ĠaÅŁk": 36730, - "Ġeinzel": 36731, - "ĠGmail": 36732, - "çŀ": 36733, - "Ġê°ľìĿ¸": 36734, - "ĠÑģемÑĮ": 36735, - "Ġjourneys": 36736, - "Ġiht": 36737, - "Ġfibre": 36738, - "Ġdramas": 36739, - "ouched": 36740, - "Ġrename": 36741, - "ĠопеÑĢ": 36742, - "Ġpoo": 36743, - "ĠDru": 36744, - "ĠиÑĤог": 36745, - "Ġzast": 36746, - "Ġcoz": 36747, - "Ġzucch": 36748, - "Ġobtaining": 36749, - "Ġcommute": 36750, - "Ġsubmer": 36751, - "ĠVish": 36752, - "ĠRabb": 36753, - "ogg": 36754, - "Ġhut": 36755, - "íĸĪìĸ´": 36756, - "æ¯Ķå¦Ĥ": 36757, - "eremi": 36758, - "Ġμα": 36759, - "Ġdiskut": 36760, - "ĠбÑĥк": 36761, - "Ġimpaired": 36762, - "depend": 36763, - "ĠÙĪا": 36764, - "ĠÑĢÑĥк": 36765, - "ĠбаÑĢ": 36766, - "Ġoxidation": 36767, - "Ġsituação": 36768, - "ÉĻn": 36769, - "ução": 36770, - "Ġsagte": 36771, - "ĠSER": 36772, - "ĠCake": 36773, - "Ġturmeric": 36774, - "ĠKak": 36775, - "bung": 36776, - "ĠKá¹Ľá¹£á¹ĩa": 36777, - "Ġpoisoning": 36778, - "Ġslipping": 36779, - "ĠSays": 36780, - "å°±åı¯ä»¥": 36781, - "òng": 36782, - "çŁ³": 36783, - "«": 36784, - "ĠClaudia": 36785, - "ĠCharacter": 36786, - "ниÑĨ": 36787, - "coat": 36788, - "Ġprogressed": 36789, - "ĠFergus": 36790, - "Ġìĺ¤ëĬ": 36791, - "Ġoat": 36792, - "ordable": 36793, - "ĠLey": 36794, - "ĠHeraus": 36795, - "Ġresultados": 36796, - "ĠKayla": 36797, - "Ġriff": 36798, - "Ġchegou": 36799, - "Ġxi": 36800, - "Ġspacious": 36801, - "Ġrecognised": 36802, - "Ġech": 36803, - "ĠTie": 36804, - "Ġlauncher": 36805, - "Jim": 36806, - "Ġsuppression": 36807, - "ĠImpossible": 36808, - "Ġguitars": 36809, - "ĠFourier": 36810, - "иÑĩеÑģкий": 36811, - "ĠTherap": 36812, - "ĠKaf": 36813, - "centered": 36814, - "ĠÑģооÑĤвеÑĤ": 36815, - "Ġklim": 36816, - "Ġcarbohydrates": 36817, - "ignant": 36818, - "ĠAstron": 36819, - "Ġemple": 36820, - "Ġdrastic": 36821, - "ĠмиÑĢе": 36822, - "вин": 36823, - "uw": 36824, - "Ġprettier": 36825, - "Ġdonuts": 36826, - "ĠAthena": 36827, - "Ġdissert": 36828, - "Ġplante": 36829, - "Ġuranium": 36830, - "ìĿĮë": 36831, - "aré": 36832, - "Ġrzecz": 36833, - "Ġdisplaying": 36834, - "æĪ²": 36835, - "Ġsarc": 36836, - "rão": 36837, - "Ġtampoco": 36838, - "Ġphilosophers": 36839, - "ĠRecht": 36840, - "æĵļ": 36841, - "Ġcomentarios": 36842, - "yse": 36843, - "Ġìľ¤": 36844, - "Ġmise": 36845, - "ĠGin": 36846, - "Ġном": 36847, - "ĠFROM": 36848, - "liner": 36849, - "atif": 36850, - "ĠspoÅĤec": 36851, - "xa": 36852, - "ĠÑĤÑĢÑĥд": 36853, - "Ġwag": 36854, - "기ìĹIJ": 36855, - "ĠMG": 36856, - "Ġoffspring": 36857, - "ĠUnderstanding": 36858, - "åıªæĺ¯": 36859, - "ORA": 36860, - "Ġwhirring": 36861, - "Ġsurrend": 36862, - "Ġpoker": 36863, - "Ġmonuments": 36864, - "ĠâĻ©": 36865, - "Ġorganised": 36866, - "ĠSozial": 36867, - "ĠFactory": 36868, - "Ñħа": 36869, - "Ġresemble": 36870, - "зд": 36871, - "Ġexplosions": 36872, - "Ġpayroll": 36873, - "Ġomn": 36874, - "ĠJorge": 36875, - "ιÏĥ": 36876, - "Ġfracture": 36877, - "Ġpersecution": 36878, - "Ġdemais": 36879, - "ECH": 36880, - ",)": 36881, - "Ġcriar": 36882, - "ĠJOSH": 36883, - "Ġdemographics": 36884, - "Ġ1600": 36885, - "Ġcurrencies": 36886, - "ĠTips": 36887, - "ĠéĢĻåĢĭ": 36888, - "ĠRefer": 36889, - "ĠDancing": 36890, - "Ġinconsistent": 36891, - "Ġdeh": 36892, - "Ġimmens": 36893, - "Ġmeist": 36894, - "Ġimpatient": 36895, - "Ġbehaves": 36896, - "æĿ¾": 36897, - 
"ĠëĤ´ìļ©": 36898, - "Ġbackstory": 36899, - "Ġagreeing": 36900, - "ĠÅģ": 36901, - "ihin": 36902, - "Ġtemperatura": 36903, - "ĠBackground": 36904, - "Ġnutzen": 36905, - "Ġëħ¹": 36906, - "ĠMänner": 36907, - "Ġcollaborations": 36908, - "ĠKos": 36909, - "éģİåİ»": 36910, - "Ġnightmares": 36911, - "ëĵ±": 36912, - "ĠQueensland": 36913, - "Ġassociates": 36914, - "ĠKok": 36915, - "Ġfactorial": 36916, - "ĠHyung": 36917, - "Ġê·¸ëĭ¤ìĿĮ": 36918, - "Ġfilho": 36919, - "Ġelét": 36920, - "Ġíĸīë³µ": 36921, - "°±": 36922, - "Ġgefunden": 36923, - "Ġsemicondu": 36924, - "Ġcounselors": 36925, - "ĠUpper": 36926, - "ĠAub": 36927, - "ickers": 36928, - "Ver": 36929, - "Ġnorthwest": 36930, - "ĠMaintenant": 36931, - "ĠLakes": 36932, - "аÑıв": 36933, - "inté": 36934, - "ì°½": 36935, - "Ġгаз": 36936, - "Ġgiorn": 36937, - "Ġdigitally": 36938, - "ĠCircuit": 36939, - "ì¼Ģ": 36940, - "ãĤĬãģ¾ãģĹãģŁ": 36941, - "Ġcheerful": 36942, - "ĠPeterson": 36943, - "ĠDanish": 36944, - "ativos": 36945, - "Ġliken": 36946, - "Ġharbor": 36947, - "алиÑģÑĤ": 36948, - "xe": 36949, - "Ġcurls": 36950, - "ĠRhod": 36951, - "End": 36952, - "ĠET": 36953, - "Ġacquaint": 36954, - "ĠKelvin": 36955, - "Ġtrif": 36956, - "ĠAway": 36957, - "ìŀIJëĬĶ": 36958, - "vs": 36959, - "Ġpágina": 36960, - "Ġinlet": 36961, - "ĠSantos": 36962, - "Ġìļ°ìĻĢ": 36963, - "Ġyapıyorsun": 36964, - "theme": 36965, - "Ġsouff": 36966, - "Ġinjected": 36967, - "Ġpóźniej": 36968, - "iverso": 36969, - "amped": 36970, - "Ġdaher": 36971, - "Ġdagger": 36972, - "ĠлÑİбим": 36973, - "Ġtummy": 36974, - "Ġenlightened": 36975, - "cents": 36976, - "ĠDah": 36977, - "Ġcuest": 36978, - "ä¾Ĩ說": 36979, - "ILY": 36980, - "Ġ×ijר": 36981, - "Ġbanging": 36982, - "ĠEmil": 36983, - "ĠCler": 36984, - "ĠBorder": 36985, - "ижÑĥ": 36986, - "Ġpresenters": 36987, - "ĠSTUD": 36988, - "coins": 36989, - "ĠíĻį": 36990, - "Ġperks": 36991, - "Ġparap": 36992, - "Ġcertaines": 36993, - "ĠLore": 36994, - "öst": 36995, - "ĠMARTIN": 36996, - "Ġbios": 36997, - "Ġwhereby": 36998, - "verts": 36999, - "ĠMiranda": 37000, - "Ġstip": 37001, - "澤": 37002, - "andez": 37003, - "׼׾": 37004, - "ujin": 37005, - "Ġê¾": 37006, - "Ġallergies": 37007, - "plate": 37008, - "Ġyapıl": 37009, - "Ġundertake": 37010, - "ĠëĤĺê°Ģ": 37011, - "Part": 37012, - "Ġkızım": 37013, - "hguru": 37014, - "ãģĤãģ¨": 37015, - "ĠJohns": 37016, - "Ġeyelashes": 37017, - "Ġdrained": 37018, - "ĠstÃ¥r": 37019, - "ãģĤãĤĬãģ¾ãģĻ": 37020, - "ĠJade": 37021, - "Ġcalend": 37022, - "film": 37023, - "Ġmesa": 37024, - "Ġludzie": 37025, - "Ġattracts": 37026, - "Ġjuices": 37027, - "Ġкил": 37028, - "Ġnieuwe": 37029, - "Ġmencion": 37030, - "Ġignition": 37031, - "Ġbladder": 37032, - "andaag": 37033, - "ĠExtension": 37034, - "íĤ¨": 37035, - "feed": 37036, - "ĠÙĪÙĩ": 37037, - "Ġspun": 37038, - "Ġtät": 37039, - "оÑĢоÑĤ": 37040, - "tyard": 37041, - "ronics": 37042, - "ĠHuge": 37043, - "Ñĥжд": 37044, - "string": 37045, - "Ġunjust": 37046, - "Ġprawn": 37047, - "Ġfrosting": 37048, - "Ġdisappearance": 37049, - "iosa": 37050, - "Ġcardi": 37051, - "ĠPriest": 37052, - "ĠcientÃŃfic": 37053, - "åĵªè£¡": 37054, - "ĠÐĴаÑģ": 37055, - "Ġë¶Ģíĥģ": 37056, - "Ġthieves": 37057, - "Ġphysique": 37058, - "ĠEugene": 37059, - "Ġблиз": 37060, - "Ġmonopoly": 37061, - "Ġbiography": 37062, - "ĠhoÅŁ": 37063, - "Ġtö": 37064, - "mac": 37065, - "Ġshocks": 37066, - "ìĦ¸ë": 37067, - "hit": 37068, - "Ġsnug": 37069, - "Ġincl": 37070, - "Ġdedic": 37071, - "Ġultras": 37072, - "ĠизвеÑģÑĤ": 37073, - "Ġutilization": 37074, - "ĠÑģовеÑĢÑĪенно": 37075, - "Ġservi": 37076, - "stag": 37077, - "180": 37078, - "Ġsewer": 37079, - 
"ĠChoice": 37080, - "Ġdischarged": 37081, - "ĠJD": 37082, - "олеÑĤ": 37083, - "ĠкваÑĢÑĤи": 37084, - "Ġtelescop": 37085, - "ĠJeÅĽli": 37086, - "ĠNana": 37087, - "cale": 37088, - "ĠÑĤон": 37089, - "mmm": 37090, - "äºĨåIJ§": 37091, - "Ġgehabt": 37092, - "ëĤł": 37093, - "æĬķ": 37094, - "à¸Ļà¸Ļ": 37095, - "Ġether": 37096, - "Ġzen": 37097, - "Ġresearched": 37098, - "ĠCzyli": 37099, - "å®Įåħ¨": 37100, - "workers": 37101, - "Ġ경찰": 37102, - "Ġsheriff": 37103, - "allo": 37104, - "Ġtipos": 37105, - "Ġprosecution": 37106, - "Ġfrogs": 37107, - "Ġfalt": 37108, - "jd": 37109, - "ĠíĮĶ": 37110, - "Ġfiltered": 37111, - "ĠOft": 37112, - "Ġìį": 37113, - "Ġdisfr": 37114, - "ĠMustang": 37115, - "Ġwoah": 37116, - "ĠREALLY": 37117, - "Ġмогли": 37118, - "Ġentrada": 37119, - "ĠигÑĢа": 37120, - "Ġmixes": 37121, - "ĠавÑĤомоб": 37122, - "ÐĻ": 37123, - "Ġshin": 37124, - "Ġparanormal": 37125, - "Ġsomeplace": 37126, - "Ġdishon": 37127, - "etaan": 37128, - "Ġfuerte": 37129, - "Ù¹": 37130, - "Ġdoom": 37131, - "ìĪľ": 37132, - "Ġexistential": 37133, - "Ġbuld": 37134, - "ĠSDK": 37135, - "ĠпÑĢавда": 37136, - "Ġturnover": 37137, - "ĠìĹ¬ê¸°ìĹIJ": 37138, - "Ġह": 37139, - "Ġmodeled": 37140, - "Ġbugün": 37141, - "Ġexperimentation": 37142, - "Ġmornings": 37143, - "Ġmedo": 37144, - "Stevie": 37145, - "Ġplayable": 37146, - "Ġairlines": 37147, - "gments": 37148, - "Ġ기ë¶Ħ": 37149, - "ĠTomb": 37150, - "ĠMVP": 37151, - "AUDIENCE": 37152, - "Ġcheckout": 37153, - "Ġpasst": 37154, - "Ġbeispiel": 37155, - "ĠLinks": 37156, - "heavy": 37157, - "Ġquestionable": 37158, - "Ġìĵ°ë": 37159, - "Ġsill": 37160, - "Ġmanipulated": 37161, - "ĠLoren": 37162, - "Ġìľ¼": 37163, - "Ġverge": 37164, - "ák": 37165, - "IES": 37166, - "Ġsabot": 37167, - "ĠCustomer": 37168, - "ależy": 37169, - "Ġnominee": 37170, - "ĠGad": 37171, - "Ġnouvelles": 37172, - "ĠSPE": 37173, - "istling": 37174, - "Ġoval": 37175, - "обÑĢаж": 37176, - "ifty": 37177, - "éĩİ": 37178, - "Ġbezel": 37179, - "yet": 37180, - "Ġfreight": 37181, - "ĠHanım": 37182, - "rÃŃa": 37183, - "Ġzoning": 37184, - "Ġindem": 37185, - "ĠBü": 37186, - "Ġfeminism": 37187, - "Ġvoix": 37188, - "Ġoficial": 37189, - "Ġdiyorum": 37190, - "»IJ": 37191, - "Ġarose": 37192, - "Ġparar": 37193, - "ìĿ¸ì§Ģ": 37194, - "ĠMartine": 37195, - "ĠLect": 37196, - "Ġrester": 37197, - "Ġdrowning": 37198, - "uya": 37199, - "cida": 37200, - "ĠAriel": 37201, - "Ġ02": 37202, - "Ġ×Ķ×Ķ": 37203, - "ç´ł": 37204, - "ĠWert": 37205, - "ТÑĭ": 37206, - "Ġwidow": 37207, - "Ġparchment": 37208, - "Ġcottage": 37209, - "ĠXL": 37210, - "ĠSlack": 37211, - "ĠNES": 37212, - "Ġrobe": 37213, - "Ġgimm": 37214, - "Ġcaminho": 37215, - "ĠHarper": 37216, - "Ġcitrus": 37217, - "Ġfirefighters": 37218, - "Ġdopamine": 37219, - "elets": 37220, - "Ġdemocrat": 37221, - "ìłľë¡ľ": 37222, - "Ġplayback": 37223, - "oj": 37224, - "ĠпÑĢок": 37225, - "ĠSullivan": 37226, - "semble": 37227, - "ĠWorth": 37228, - "ĠMustafa": 37229, - "าร": 37230, - "Ġmets": 37231, - "éĸĢ": 37232, - "лоÑģÑĮ": 37233, - "Ġinertia": 37234, - "Ġuniforms": 37235, - "足": 37236, - "ério": 37237, - "×ķר×Ķ": 37238, - "ént": 37239, - "Ġà®Ĵ": 37240, - "ĠÑģамÑĭÑħ": 37241, - "Ġvoulais": 37242, - "ĠZimmer": 37243, - "ê²łë": 37244, - "ĠноÑģ": 37245, - "encias": 37246, - "Ġrelación": 37247, - "Ġ걸ë": 37248, - "Ġfaction": 37249, - "Ġgosp": 37250, - "полож": 37251, - "nap": 37252, - "hak": 37253, - "Ġproceedings": 37254, - "ĠìĨĶ": 37255, - "ìķĦëĭĪ": 37256, - "ĠìŀIJ기": 37257, - "Ġwerd": 37258, - "Ġsof": 37259, - "Ġschlim": 37260, - "Ġflavored": 37261, - "Ġquadratic": 37262, - "ĠBoot": 37263, - "Ġpublicity": 37264, - 
"ĠCaro": 37265, - "Ġ?\"": 37266, - "ниÑĨа": 37267, - "mania": 37268, - "ĠSUR": 37269, - "ĠBUR": 37270, - "lance": 37271, - "ética": 37272, - "Ġzobaczy": 37273, - "Ġtrio": 37274, - "sama": 37275, - "ĠtaÅŁ": 37276, - "Ġasymm": 37277, - "resser": 37278, - "Ġتع": 37279, - "ĠпеÑģ": 37280, - "Ġbeginnings": 37281, - "ladım": 37282, - "ĠбÑĭÑģÑĤÑĢ": 37283, - "Ġmoo": 37284, - "ĠGeneva": 37285, - "Ġåľ¨": 37286, - "erus": 37287, - "borah": 37288, - "Ġrefusing": 37289, - "bull": 37290, - "ĠWaiting": 37291, - "ĠIndividual": 37292, - "Ġanonym": 37293, - "imens": 37294, - "Ġmedidas": 37295, - "Ġfragrant": 37296, - "Ġdirectement": 37297, - "ĠìķĦë§Ī": 37298, - "uria": 37299, - "Ġspherical": 37300, - "Ġabge": 37301, - "ĠVictorian": 37302, - "Ġspectacle": 37303, - "ĠRodriguez": 37304, - "Ġocup": 37305, - "ĠNär": 37306, - "marks": 37307, - "ngulo": 37308, - "ĠLuci": 37309, - "Ġshouted": 37310, - "Ġregulators": 37311, - "ÄŁini": 37312, - "Ġdisent": 37313, - "ĠÑĢÑĭн": 37314, - "ëĤ¨": 37315, - "ĠìĤ´ë": 37316, - "Ġproblèmes": 37317, - "ĠFinger": 37318, - "assemble": 37319, - "Ġpear": 37320, - "Ġdroite": 37321, - "ĠEverywhere": 37322, - "tam": 37323, - "оÑĤив": 37324, - "вой": 37325, - "ordinate": 37326, - "ĠLak": 37327, - "ĠmỼi": 37328, - "ĠTelevision": 37329, - "Ġexponentially": 37330, - "avas": 37331, - "Ġblev": 37332, - "ĠMT": 37333, - "俺": 37334, - "Connell": 37335, - "ĠêµŃ민": 37336, - "ĠÑģвоим": 37337, - "Ġacha": 37338, - "ĠDynasty": 37339, - "Jin": 37340, - "Ġtore": 37341, - "Ġflor": 37342, - "Ġмногие": 37343, - "æ²Ĵäºĭ": 37344, - "owan": 37345, - "bah": 37346, - "Ġì£Ħ": 37347, - "ĠCela": 37348, - "Ġìµľê·¼": 37349, - "Ġpermettre": 37350, - "Ġabras": 37351, - "Ġverstehen": 37352, - "Ġescort": 37353, - "ĠThem": 37354, - "ärke": 37355, - "porter": 37356, - "Ġkahkaha": 37357, - "Ġhect": 37358, - "Ġdau": 37359, - "wah": 37360, - "olve": 37361, - "ĠAges": 37362, - "schaft": 37363, - "ĠStell": 37364, - "nelle": 37365, - "ĠEnsuite": 37366, - "ĠÐĴÑģем": 37367, - "Ġcréd": 37368, - "ĠPP": 37369, - "lords": 37370, - "grunting": 37371, - "Ġcontraction": 37372, - "Got": 37373, - "Ġacquiring": 37374, - "Ġsopr": 37375, - "Ġpoisonous": 37376, - "RNA": 37377, - "Ġanar": 37378, - "ĠHof": 37379, - "')": 37380, - "Ġremarkably": 37381, - "Ġinternacional": 37382, - "ücke": 37383, - "inqu": 37384, - "Ġduy": 37385, - "Ġbeasts": 37386, - "ĠLAN": 37387, - "Ġprecedent": 37388, - "ĠRPM": 37389, - "åij¨": 37390, - "Ġselon": 37391, - "Ġmorte": 37392, - "Ġcomeçou": 37393, - "Ñıла": 37394, - "Ġinterpreting": 37395, - "ĠBurke": 37396, - "ÑĤÑĢа": 37397, - "ĠìĿ´ëŁ¬": 37398, - "Ġpessim": 37399, - "ĠNok": 37400, - "íĮĿ": 37401, - "Female": 37402, - "Ġìĭ¤í": 37403, - "ĻĢ": 37404, - "Ġstimulation": 37405, - "Ġslick": 37406, - "Ġê°ĢëĬĶ": 37407, - "Ġказ": 37408, - "ĠHBO": 37409, - "Ġpapier": 37410, - "Ġkönnten": 37411, - "Ñĥбли": 37412, - "ĠConstant": 37413, - "SPEAKING": 37414, - "ĠktórÄħ": 37415, - "Ġcosmetics": 37416, - "ĠTrend": 37417, - "Ġrobbery": 37418, - "Ġtitt": 37419, - "Ġgjort": 37420, - "Ġdietary": 37421, - "łĮ": 37422, - "ĠKirby": 37423, - "ĠпÑĢимеÑĢно": 37424, - "Ġqualification": 37425, - "Ġìķī": 37426, - "Ġcabinets": 37427, - "Ġhttp": 37428, - "ĠErica": 37429, - "義": 37430, - "Ġdisadvantages": 37431, - "Ġchattering": 37432, - "yz": 37433, - "feit": 37434, - "Ġguild": 37435, - "ĠETF": 37436, - "ĠDragons": 37437, - "ĠHERE": 37438, - "venth": 37439, - "ÙĦاÙħ": 37440, - "Ġmarché": 37441, - "Dam": 37442, - "Ġphoton": 37443, - "Ġestable": 37444, - "Mag": 37445, - "Ġolhar": 37446, - "Ġcoupling": 37447, - "ĠHilfe": 37448, - "ĠWizard": 37449, 
- "Ġмало": 37450, - "help": 37451, - "ĠlÃŃnea": 37452, - "Ġì«": 37453, - "Ġstandalone": 37454, - "Ġmorale": 37455, - "Ġzweite": 37456, - "ãĤĪãĤįãģĹãģı": 37457, - "ährt": 37458, - "Ġdotted": 37459, - "Ġdripping": 37460, - "ĠFlag": 37461, - "éĿĴ": 37462, - "rocket": 37463, - "rategy": 37464, - "irim": 37465, - "Ġíķĺë©´ìĦľ": 37466, - "Ġsogenan": 37467, - "ĠUno": 37468, - "ĠSchutz": 37469, - "Ġestilo": 37470, - "ĠSubs": 37471, - "ĠDaisy": 37472, - "ÐĿеÑĤ": 37473, - "'...": 37474, - "Ġplatinum": 37475, - "Ġbirl": 37476, - "ĠSovi": 37477, - "Ġviolate": 37478, - "ÑĥеÑĤÑģÑı": 37479, - "rill": 37480, - "Ġtraz": 37481, - "Ġsnip": 37482, - "Ġcumpl": 37483, - "à¸Ńà¸ģ": 37484, - "Ġcuk": 37485, - "éħĴ": 37486, - "ĠParlament": 37487, - "Ġhypert": 37488, - "Ġpulp": 37489, - "Ġtongues": 37490, - "atto": 37491, - "Ġbusca": 37492, - "ihn": 37493, - "ERO": 37494, - "ĠÙĬع": 37495, - "Ġvarias": 37496, - "ĠMarian": 37497, - "Ġbounded": 37498, - "Ġpitching": 37499, - "Ġdeficiency": 37500, - "ĠBlessed": 37501, - "ĠExerc": 37502, - "uchs": 37503, - "ĠnhÆ°ng": 37504, - "æľ¬å½ĵ": 37505, - "Ġraped": 37506, - "hales": 37507, - "Ġmala": 37508, - "pic": 37509, - "Ġ401": 37510, - "ÅĽniej": 37511, - "arina": 37512, - "ëĵ¤ìĿĦ": 37513, - "otti": 37514, - "Ġдолго": 37515, - "Ġtracker": 37516, - "ĠShelby": 37517, - "Ġvanished": 37518, - "Ġbakery": 37519, - "Kapı": 37520, - "Jesus": 37521, - "ĠKR": 37522, - "JO": 37523, - "ħ¸": 37524, - "Ġdiscs": 37525, - "ìĦ¯": 37526, - "ì§Ģë": 37527, - "×Ļצ": 37528, - "emary": 37529, - "Kendra": 37530, - "Ġyük": 37531, - "ückt": 37532, - "Ġvaz": 37533, - "Ġkup": 37534, - "aktu": 37535, - "ĠÑģпаÑģибо": 37536, - "Ġaik": 37537, - "Ġnursery": 37538, - "Ġendangered": 37539, - "êmement": 37540, - "ematics": 37541, - "Ġresponders": 37542, - "ĠRepresentatives": 37543, - "Ġsculptures": 37544, - "igkeiten": 37545, - "Ġdepl": 37546, - "Ġinterpretations": 37547, - "Ġdeadlines": 37548, - "Ġ1942": 37549, - "ÃĹ": 37550, - "Ġsugars": 37551, - "emu": 37552, - "lively": 37553, - "Ġrecreational": 37554, - "Ġdistort": 37555, - "Ġunderscore": 37556, - "Ġunquote": 37557, - "Ġsafest": 37558, - "Ġswollen": 37559, - "Ġanalyses": 37560, - "Ġcommencé": 37561, - "妹": 37562, - "andin": 37563, - "ĠХоÑĢоÑĪо": 37564, - "Ġdiarr": 37565, - "ãģ¾ãģģ": 37566, - "ziest": 37567, - "Ġtoothbrush": 37568, - "éł»éģĵ": 37569, - "uations": 37570, - "Ġcade": 37571, - "Ġbacklash": 37572, - "hind": 37573, - "Ġrisque": 37574, - "zess": 37575, - "ĠìĿ´ìķ¼ê¸°": 37576, - "Ġesperar": 37577, - "Ġtranslations": 37578, - "ioned": 37579, - "groans": 37580, - "ĠпÑĥÑĤ": 37581, - "Ġgenetically": 37582, - "éĢł": 37583, - "Ġhappiest": 37584, - "Ġwerk": 37585, - "atoon": 37586, - "Ġmusi": 37587, - "Ġfunção": 37588, - "ĠìŀħëĭĪëĭ¤": 37589, - "ĠÑĢай": 37590, - "Ġbevor": 37591, - "BLANK": 37592, - "Ġrepentance": 37593, - "Put": 37594, - "Ġpotrzeb": 37595, - "Ġsala": 37596, - "Ġcampa": 37597, - "WER": 37598, - "ĠdecÃŃa": 37599, - "Ġsécurité": 37600, - "ĠAppreciate": 37601, - "Ñĩи": 37602, - "ĠRandom": 37603, - "ë³Ħ": 37604, - "kah": 37605, - "Ġmöj": 37606, - "Ġsäger": 37607, - "Ġ×Ļ׼×ķ׾": 37608, - "Ġ190": 37609, - "xtures": 37610, - "Eu": 37611, - "Ġgä": 37612, - "Ġ×ijת": 37613, - "ĠCroat": 37614, - "apo": 37615, - "PLE": 37616, - "Ġpersistence": 37617, - "åĬ©": 37618, - "Ġblends": 37619, - "Ġtreffen": 37620, - "ĠSantiago": 37621, - "ydia": 37622, - "aldo": 37623, - "ĠTensorFlow": 37624, - "ĠDual": 37625, - "ãĥľ": 37626, - "Ġchiff": 37627, - "ìĹ´": 37628, - "Ġcontracted": 37629, - "Ġsegreg": 37630, - "ĠFairy": 37631, - "Ġwisely": 37632, - "Ġvulnerabilities": 
37633, - "Ġhandheld": 37634, - "Ġgadgets": 37635, - "ĠboÅŁ": 37636, - "ĠPopular": 37637, - "Ġcurvature": 37638, - "문": 37639, - "ĠMARY": 37640, - "ìĿ´ìĬ": 37641, - "Ġformulation": 37642, - "Ġcelery": 37643, - "Ġblurry": 37644, - "ĠTS": 37645, - "alez": 37646, - "Ġws": 37647, - "Ġprogramm": 37648, - "ĠStack": 37649, - "ĠJIM": 37650, - "овали": 37651, - "ıll": 37652, - "Ġpère": 37653, - "ĠKanye": 37654, - "ĠDelaware": 37655, - "Ġãģł": 37656, - "Ġdaunting": 37657, - "ĠбеÑģ": 37658, - "ĠStupid": 37659, - "big": 37660, - "fficial": 37661, - "Ġprecipitation": 37662, - "Ġplung": 37663, - "ục": 37664, - "burse": 37665, - "Ġdarle": 37666, - "Ġcripp": 37667, - "Ġpioneer": 37668, - "Ġdisput": 37669, - "Ġsean": 37670, - "ãģĵãĤĵãģª": 37671, - "Ġresistor": 37672, - "Ġallein": 37673, - "ipples": 37674, - "arel": 37675, - "Ġendors": 37676, - "zust": 37677, - "ĠÑĢебÑıÑĤа": 37678, - "eded": 37679, - "Ġì¹´ë©Ķë": 37680, - "Ġlleva": 37681, - "Ġkennt": 37682, - "Ġбал": 37683, - "ĠDocument": 37684, - "ĠKnights": 37685, - "Ġbuckle": 37686, - "Ġìī¬": 37687, - "Ġalk": 37688, - "ĠEveryday": 37689, - "atters": 37690, - "Ġtoilets": 37691, - "Ġjugar": 37692, - "ĠìŀĪì§Ģ": 37693, - "Ġgenauso": 37694, - "ĠLandesregierung": 37695, - "ãģ£ãģ±": 37696, - "ije": 37697, - "Ġtrailers": 37698, - "ĠTigers": 37699, - "Ġgitti": 37700, - "Ġforgiving": 37701, - "Ġconcurrent": 37702, - "ĠVu": 37703, - "ĠíĬ¹íŀĪ": 37704, - "ĠBROWN": 37705, - "ounded": 37706, - "\";": 37707, - "Ġtremb": 37708, - "Ġtiet": 37709, - "ĠÑĢежим": 37710, - "Ġnutshell": 37711, - "елиÑĩ": 37712, - "Ġlosers": 37713, - "ricting": 37714, - "Ġredeem": 37715, - "defined": 37716, - "Nice": 37717, - "Ġbroadband": 37718, - "KO": 37719, - "Ġteasing": 37720, - "Ġpartisan": 37721, - "ıma": 37722, - "Ġìŀ¬ë¯¸": 37723, - "ĠJourney": 37724, - "Ġslopes": 37725, - "uning": 37726, - "grunts": 37727, - "Ġtäll": 37728, - "Ġuncovered": 37729, - "ĠmyÅĽlÄĻ": 37730, - "ĠEsther": 37731, - "äºİ": 37732, - "ĠHealthy": 37733, - "Ġë°ij": 37734, - "rée": 37735, - "Ġpolarization": 37736, - "Ġflav": 37737, - "Ġcambiar": 37738, - "Ġyr": 37739, - "ĠRanch": 37740, - "Ġsplits": 37741, - "Ġtrouvé": 37742, - "åľĭ家": 37743, - "Ġrecorder": 37744, - "Ġdépart": 37745, - "ÙĪب": 37746, - "ĠKry": 37747, - "Ġinteressant": 37748, - "Ġederim": 37749, - "ÅĽwiad": 37750, - "ilateral": 37751, - "wright": 37752, - "Ġpourra": 37753, - "êter": 37754, - "Ġcamel": 37755, - "áŀ": 37756, - "Ġrapidement": 37757, - "Ġmej": 37758, - "Ġstiffness": 37759, - "ADAS": 37760, - "Ġdiffers": 37761, - "Ġalot": 37762, - "ĠSig": 37763, - "ÑıÑĤелÑĮ": 37764, - "Ġabstraction": 37765, - "åľĺ": 37766, - "Ġkeiner": 37767, - "grupp": 37768, - "ĠSherlock": 37769, - "íĺĶ": 37770, - "Ġcite": 37771, - "Ġoverflow": 37772, - "Ġtại": 37773, - "úcar": 37774, - "bula": 37775, - "Ġconjunto": 37776, - "ĠCI": 37777, - "Ġmoderator": 37778, - "Ġindirectly": 37779, - "Ġalleine": 37780, - "âĤ": 37781, - "ÑĪиб": 37782, - "Ġбаб": 37783, - "Ġdanach": 37784, - "Ġ1939": 37785, - "Ġpromet": 37786, - "Ġdestinations": 37787, - "ĠIllust": 37788, - "ικÏĮ": 37789, - "Ġsabes": 37790, - "Ġheh": 37791, - "ĠGesetzent": 37792, - "ĠMiz": 37793, - "енко": 37794, - "ĠMys": 37795, - "Ь": 37796, - "ĠJudaism": 37797, - "Ġmustache": 37798, - "Ġstimmt": 37799, - "ĠGaza": 37800, - "Ġvolte": 37801, - "Ġnuo": 37802, - "Ġmón": 37803, - "ĠComput": 37804, - "ูà¹Ī": 37805, - "ĠRadi": 37806, - "Ġexceptionally": 37807, - "Ġassumes": 37808, - "éĸĭå¿ĥ": 37809, - "ãģĪãģ°": 37810, - "inform": 37811, - "Ġshrine": 37812, - "æĵĬ": 37813, - "Ġimplication": 37814, - "ĠFitz": 37815, - "æ²ĴéĹľä¿Ĥ": 
37816, - "!.": 37817, - "Ġlt": 37818, - "Ġalloy": 37819, - "Ġethic": 37820, - "Ġmonastery": 37821, - "ìĭľì£ł": 37822, - "icação": 37823, - "Ġcoordinating": 37824, - "ĠMoto": 37825, - "Ġoverlook": 37826, - "Ġchois": 37827, - "Ġantibiotic": 37828, - "ĠMinne": 37829, - "ĠBJ": 37830, - "ĠApa": 37831, - "orian": 37832, - "Ġspilled": 37833, - "Jam": 37834, - "Ġhusbands": 37835, - "Ġcreations": 37836, - "Ġañ": 37837, - "üssel": 37838, - "ĠìĿ´ìļ©": 37839, - "Ġanalyse": 37840, - "rose": 37841, - "Ġpunched": 37842, - "Ġpresque": 37843, - "Ġastronomy": 37844, - "Ġschwierig": 37845, - "ĠEbola": 37846, - "Ġcis": 37847, - "Ġacet": 37848, - "ĠFX": 37849, - "endre": 37850, - "ĠìĿĮìķħ": 37851, - "Ġwebpage": 37852, - "Ġfreaked": 37853, - "Ġlatte": 37854, - "Ġì¿ł": 37855, - "Ġ머ë": 37856, - "Never": 37857, - "Gra": 37858, - "íĻĶ를": 37859, - "eyed": 37860, - "Ġë°ľëĿ¼": 37861, - "Ġespera": 37862, - "Ġaparece": 37863, - "ração": 37864, - "Ġdisruptive": 37865, - "ĠJoint": 37866, - "urous": 37867, - "reas": 37868, - "ĠquerÃŃa": 37869, - "Ġdistributions": 37870, - "Ġexponent": 37871, - "ì¹ĺ를": 37872, - "Ġdl": 37873, - "zhou": 37874, - "ĠHearing": 37875, - "å·®ä¸įå¤ļ": 37876, - "ĠCraw": 37877, - "Ġfloats": 37878, - "ounced": 37879, - "Lab": 37880, - "World": 37881, - "Ġburdens": 37882, - "Ġauthoritarian": 37883, - "ĠBolt": 37884, - "ĠоднÑĥ": 37885, - "Ġpigeon": 37886, - "Ġdistractions": 37887, - "ĠHerausforder": 37888, - "Ġzest": 37889, - "esc": 37890, - "Ġshakes": 37891, - "atas": 37892, - "ĠÙħØ´": 37893, - "holes": 37894, - "Ġthinkers": 37895, - "alta": 37896, - "Ġarche": 37897, - "ĠSuk": 37898, - "anha": 37899, - "Ġtempting": 37900, - "Ġyoutuber": 37901, - "Ġvì": 37902, - "ĠdziaÅĤa": 37903, - "ĠVatican": 37904, - "Park": 37905, - "Ġsupers": 37906, - "ĠNikki": 37907, - "ëĬIJë": 37908, - "orang": 37909, - "ramient": 37910, - "鬼": 37911, - "Ġê°ĸê³ł": 37912, - "Ġdesserts": 37913, - "Ġavere": 37914, - "ĠGregory": 37915, - "Ġëĵ¤ìĸ´ìĺ": 37916, - "Ġcosting": 37917, - "ĠClinic": 37918, - "Ġrebels": 37919, - "ĠMob": 37920, - "Ġbunlar": 37921, - "ĠYours": 37922, - "ertime": 37923, - "Ġretali": 37924, - "mara": 37925, - "atus": 37926, - "alles": 37927, - "ĠдÑĢ": 37928, - "ĠдиÑģ": 37929, - "Ġdiscounts": 37930, - "ĠGUY": 37931, - "Ġкакое": 37932, - "ĠExperiment": 37933, - "rement": 37934, - "ĠXiang": 37935, - "Ġbate": 37936, - "WE": 37937, - "Ġspecialize": 37938, - "Ġdeity": 37939, - "ĠLoki": 37940, - "mag": 37941, - "ĠNit": 37942, - "West": 37943, - "Ġmaternal": 37944, - "Ġquis": 37945, - "åŁºæľ¬": 37946, - "broken": 37947, - "Ġlasers": 37948, - "Ġhakk": 37949, - "ĠAngels": 37950, - "Ġmastery": 37951, - "antis": 37952, - "Tiffany": 37953, - "eee": 37954, - "çij": 37955, - "orem": 37956, - "Ġinacc": 37957, - "Ġjurisdictions": 37958, - "ĠKardash": 37959, - "æľº": 37960, - "Il": 37961, - "ĠSinn": 37962, - "åĭķçĶ»": 37963, - "Ġathletics": 37964, - "cÄĻ": 37965, - "Ġloosely": 37966, - "Ġdieta": 37967, - "Ag": 37968, - "Ġ??": 37969, - "ĠëĮĢíijľ": 37970, - "Ġsuperv": 37971, - "Ġnutrit": 37972, - "Ġdrifting": 37973, - "ĠìĦłìĥĿëĭĺ": 37974, - "ĠпонÑıл": 37975, - "ĠVictory": 37976, - "ÙĦØ©": 37977, - "×ķ׳×Ķ": 37978, - "ĠпиÑĪ": 37979, - "Ġshaved": 37980, - "Ġmesure": 37981, - "onden": 37982, - "Ùĥر": 37983, - "Ġexile": 37984, - "ĠDesde": 37985, - "ĠPinterest": 37986, - "Ġattachments": 37987, - "Ġhombres": 37988, - "Ġfines": 37989, - "ĠìĦ¸ìĥģ": 37990, - "Ġsleeps": 37991, - "ĠTaco": 37992, - "ĠIRA": 37993, - "rios": 37994, - "Ġoll": 37995, - "etes": 37996, - "Ġunut": 37997, - "fashioned": 37998, - "Ġtreball": 37999, - "ĠNearly": 38000, - 
"ĠÑĢеалÑĮно": 38001, - "Ġchil": 38002, - "éĢ±": 38003, - "ÄŁa": 38004, - "ĠMEL": 38005, - "roscop": 38006, - "ĠCG": 38007, - "Ġvenge": 38008, - "Ġdishwasher": 38009, - "algic": 38010, - "Ġmodifier": 38011, - "Ġembassy": 38012, - "timer": 38013, - "emics": 38014, - "Ġintricate": 38015, - "Ġevet": 38016, - "ĠëĮĢë°ķ": 38017, - "Ġisot": 38018, - "ĠнаÑĥÑĩ": 38019, - "ĠQuiz": 38020, - "reso": 38021, - "δÏİ": 38022, - "Ġyelled": 38023, - "Ġfeder": 38024, - "ELLER": 38025, - "Ġexceeded": 38026, - "onas": 38027, - "icano": 38028, - "ĠживоÑĤ": 38029, - "ĠMao": 38030, - "ĠKazuto": 38031, - "Ġãħĭãħĭãħĭãħĭ": 38032, - "Ġfrontline": 38033, - "ĠHungarian": 38034, - "Ġüberall": 38035, - "awat": 38036, - "Ġgrips": 38037, - "ições": 38038, - "arnya": 38039, - "ĠÍ¡": 38040, - "Ġseid": 38041, - "Ġanak": 38042, - "Ġacabou": 38043, - "íķij": 38044, - "Ġnotorious": 38045, - "ĠGodzilla": 38046, - "Ġovercoming": 38047, - "ĠPend": 38048, - "Ġolabilir": 38049, - "ülme": 38050, - "Ġerhalten": 38051, - "ãĤīãģĦ": 38052, - "ê·¹": 38053, - "ĠMeter": 38054, - "Ġstaan": 38055, - "Ol": 38056, - "Ġchats": 38057, - "ĠBuenos": 38058, - "ÃŃve": 38059, - "aluable": 38060, - "Ġstrategically": 38061, - "Ġcomprised": 38062, - "ĠпеÑĢÑģонаж": 38063, - "Ġwann": 38064, - "ĠCen": 38065, - "ниÑĤе": 38066, - "Łģ": 38067, - "ĠÑĤобой": 38068, - "iad": 38069, - "ĠkardeÅŁim": 38070, - "ĠCongressman": 38071, - "reaming": 38072, - "homme": 38073, - "Ġcommunaut": 38074, - "Ġalcoholic": 38075, - "Ġpickled": 38076, - "Ġacord": 38077, - "position": 38078, - "egól": 38079, - "Ġtroubling": 38080, - "ĠMarcheg": 38081, - "Ġzumindest": 38082, - "Ġseamlessly": 38083, - "Ġolun": 38084, - "ĠTVs": 38085, - "ĠпÑĢакÑĤиÑĩеÑģки": 38086, - "Ġbackend": 38087, - "ãģĵãĤĵãģ«ãģ¡ãģ¯": 38088, - "idable": 38089, - "Ġgadget": 38090, - "Ġfaço": 38091, - "ĠMarchegiani": 38092, - "Ġë°¤": 38093, - "Ġaccidental": 38094, - "ĠLP": 38095, - "Ġeldest": 38096, - "ĠAdmiral": 38097, - "ĠnÄĥm": 38098, - "lever": 38099, - "Ġpastel": 38100, - "Ġfondo": 38101, - "Connie": 38102, - "Ġtercer": 38103, - "Ġpact": 38104, - "ĠMonte": 38105, - "Ġmeats": 38106, - "ĠSMS": 38107, - "ĠAustralians": 38108, - "ç¼": 38109, - "Rhett": 38110, - "Ġexactement": 38111, - "Ġë¹¼": 38112, - "ĠMOD": 38113, - "ç¡": 38114, - "ĠRapt": 38115, - "ĠNoch": 38116, - "Ġabort": 38117, - "ĠNaval": 38118, - "ĠFuji": 38119, - "INTER": 38120, - "ĠновÑĭй": 38121, - "Ġmiejsce": 38122, - "ĠICU": 38123, - "ĠGraduate": 38124, - "ĠGlen": 38125, - "ardi": 38126, - "ĠÈĺ": 38127, - "Ġsolder": 38128, - "Ġprofessions": 38129, - "Ġorthog": 38130, - "omn": 38131, - "introdu": 38132, - "ĠDenise": 38133, - "ìŀIJ를": 38134, - "Ġcorrespondence": 38135, - "AMA": 38136, - "Ġinflict": 38137, - "Ġfand": 38138, - "ĠGü": 38139, - "ĠÑĩеÑĤ": 38140, - "Ġtraced": 38141, - "Ġpatents": 38142, - "Ġambush": 38143, - "Ġlotta": 38144, - "ffer": 38145, - "ĠWagner": 38146, - "Ġimperson": 38147, - "Ġextrêmement": 38148, - "ÙĤت": 38149, - "conduct": 38150, - "Att": 38151, - "ĠMueller": 38152, - "ĠAlicia": 38153, - "Ġcyc": 38154, - "Ġhacker": 38155, - "Ġtys": 38156, - "Ġhail": 38157, - "ĠзаÑıв": 38158, - "Ġpasso": 38159, - "Ġì¶Ķê°Ģ": 38160, - "ĠÎĪ": 38161, - "Ġpackaged": 38162, - "ĠCynthia": 38163, - "heet": 38164, - "ä¸ŃåĽ½": 38165, - "ĠNissan": 38166, - "ĠQuesto": 38167, - "é¨": 38168, - "did": 38169, - "Ġμια": 38170, - "ĠEllis": 38171, - "ĠAnalysis": 38172, - "cemos": 38173, - "Ġaseg": 38174, - "ĠMyster": 38175, - "ĠCao": 38176, - "Ġtuv": 38177, - "ĠIndustry": 38178, - "ì£¼ê³ł": 38179, - "otal": 38180, - "Ġpequeño": 38181, - "bras": 38182, - "Ġcomprehend": 
38183, - "ĠSimpson": 38184, - "ÑģÑĤвие": 38185, - "ocracy": 38186, - "иÑĩеÑģки": 38187, - "ĠMush": 38188, - "ĠLaurie": 38189, - "Ġtriangular": 38190, - "ĠPresents": 38191, - "ĠKunden": 38192, - "ç´¹": 38193, - "æѦ": 38194, - "ĠIss": 38195, - "ĠDeck": 38196, - "á»ĥn": 38197, - "ĠDarkness": 38198, - "Ġinflammatory": 38199, - "eremiah": 38200, - "Ġwarmed": 38201, - "veyard": 38202, - "ĠMemory": 38203, - "etty": 38204, - "Ġtaxpayers": 38205, - "à¸ĵ": 38206, - "Ø¡": 38207, - "Ġpractise": 38208, - "ëĭ¬ë": 38209, - "Ġdrilled": 38210, - "mÃ¼ÅŁ": 38211, - "logo": 38212, - "ĠFach": 38213, - "¤ë¡ľ": 38214, - "Ġübrigens": 38215, - "Ġkonnten": 38216, - "Ġnormalmente": 38217, - "Ġargues": 38218, - "ilingual": 38219, - "°ë¥¼": 38220, - "egal": 38221, - "Ġtravaill": 38222, - "ovy": 38223, - "аÑĤо": 38224, - "Ġruth": 38225, - "ĠLights": 38226, - "Ġconsisted": 38227, - "×ijר×Ļ×Ŀ": 38228, - "Ġstereotype": 38229, - "Ġpayer": 38230, - "ĠRee": 38231, - "ĠAirbnb": 38232, - "Ġdrowned": 38233, - "ĠZoe": 38234, - "Ġcanopy": 38235, - "Ġbarr": 38236, - "ĠноÑĩ": 38237, - "Ġpagan": 38238, - "Ġjars": 38239, - "Ġrê": 38240, - "erver": 38241, - "æĪ¿": 38242, - "ieben": 38243, - "Ġespect": 38244, - "ĠFi": 38245, - "Ġunwilling": 38246, - "Ġtechnician": 38247, - "ặt": 38248, - "member": 38249, - "ĠCanal": 38250, - "سÙħ": 38251, - "Ġlieber": 38252, - "Ġinference": 38253, - "Ġhonoring": 38254, - "åijµ": 38255, - "ĠCampaign": 38256, - "Ġlineage": 38257, - "ĠStress": 38258, - "Ġvictories": 38259, - "Ġdeja": 38260, - "×£": 38261, - "êtes": 38262, - "blick": 38263, - "Ġменее": 38264, - "oths": 38265, - "ĠCouple": 38266, - "Jason": 38267, - "ĠNicolas": 38268, - "екÑģ": 38269, - "lib": 38270, - "Ġherramient": 38271, - "Ġ×IJ×ķ×ŀר": 38272, - "Ġвидим": 38273, - "millimeter": 38274, - "Ġsilhouette": 38275, - "Ġdriveway": 38276, - "Ġcherish": 38277, - "ãħłãħł": 38278, - "Ġransom": 38279, - "Ġinterdisciplinary": 38280, - "ĠPortal": 38281, - "Ġtrag": 38282, - "thood": 38283, - "Ġtedious": 38284, - "Ġglossy": 38285, - "Ġprépar": 38286, - "ĠCay": 38287, - "ĠTook": 38288, - "ĠBottom": 38289, - "Ġzig": 38290, - "å«": 38291, - "åį±": 38292, - "represented": 38293, - "à¹Ģลย": 38294, - "Ġdesarrollo": 38295, - "ìĦľë": 38296, - "Ġviscos": 38297, - "Ġmilligram": 38298, - "ĠGund": 38299, - "Ġferment": 38300, - "drum": 38301, - "Ġdrawers": 38302, - "Laugh": 38303, - "Ġpelos": 38304, - "Ġpavement": 38305, - "Ġmemoir": 38306, - "avait": 38307, - "Ġ2050": 38308, - "¤ë¥¼": 38309, - "Ġrazón": 38310, - "Ġflourish": 38311, - "Ġstern": 38312, - "ä¸Ī": 38313, - "ĠChung": 38314, - "Ġserpent": 38315, - "ĠGentlemen": 38316, - "羣çļĦå¾Ī": 38317, - "kook": 38318, - "Ġlut": 38319, - "importe": 38320, - "parent": 38321, - "Ġwsz": 38322, - "Ġscree": 38323, - "ĠMitarbeiter": 38324, - "å·´": 38325, - "mut": 38326, - "Ġìĸĺ기를": 38327, - "Ġsemble": 38328, - "ĠOW": 38329, - "Ġinvestigator": 38330, - "ĠCheryl": 38331, - "ĠGerald": 38332, - "Ġprere": 38333, - "Ġcompares": 38334, - "nyt": 38335, - "Ġdiferença": 38336, - "?-": 38337, - "Ġquá": 38338, - "ר×Ļ": 38339, - "Sen": 38340, - "Ġheps": 38341, - "Ġgratuit": 38342, - "Ġconsort": 38343, - "ĠSTOP": 38344, - "ĠProtestant": 38345, - "Ġelectrode": 38346, - "âĹ": 38347, - "Ġsecurely": 38348, - "иÑĩеÑģкой": 38349, - "Ġtää": 38350, - "Ġregisters": 38351, - "ĠHeavenly": 38352, - "ogly": 38353, - "issä": 38354, - "ĠPhysics": 38355, - "ĠMerkel": 38356, - "Ġrév": 38357, - "éĻ¢": 38358, - "Ġerased": 38359, - "ĠSacramento": 38360, - "Ġcoffin": 38361, - "Ġexacer": 38362, - "Ġlanz": 38363, - "Ġpoets": 38364, - "ulif": 38365, - "Ġì¹ĺë": 
38366, - "ĠNerd": 38367, - "ĠNCT": 38368, - "ĠHour": 38369, - "nehmer": 38370, - "ŀĺëıĦ": 38371, - "ĠPrinci": 38372, - "Sw": 38373, - "mies": 38374, - "armed": 38375, - "ĠBeatles": 38376, - "Ġpropagation": 38377, - "Ġexchanged": 38378, - "Ġcumulative": 38379, - "Ġì§ijìĹIJ": 38380, - "Ġdefeating": 38381, - "æĬ±": 38382, - "bels": 38383, - "Ġwes": 38384, - "ĠOdyssey": 38385, - "ä½łæĥ³": 38386, - "avior": 38387, - "ĠìľĦìĹIJ": 38388, - "Ġbrit": 38389, - "Ġhijo": 38390, - "DAY": 38391, - "ĠاÙĦتÙĬ": 38392, - "ĠСеÑĢг": 38393, - "Ñĥка": 38394, - "edsiÄĻ": 38395, - "Ġimpos": 38396, - "Ġellas": 38397, - "Ġfirearms": 38398, - "ĠNR": 38399, - "Ġ×ij×IJ": 38400, - "ĠÐŁÐ¾ÐºÐ°": 38401, - "awi": 38402, - "ĠìĦ±ê³µ": 38403, - "Ġpupils": 38404, - "ĠTack": 38405, - "Ġfrase": 38406, - "ĠShip": 38407, - "Ġstad": 38408, - "举": 38409, - "ĠGreater": 38410, - "unun": 38411, - "immung": 38412, - "grown": 38413, - "ĠNXT": 38414, - "ĠAmericas": 38415, - "fox": 38416, - "Ġmanten": 38417, - "éłIJåĤĻ": 38418, - "ĠÑģок": 38419, - "Ġrikt": 38420, - "lectric": 38421, - "deep": 38422, - "ĠзнаеÑĪÑĮ": 38423, - "Ġbenut": 38424, - "ĠInfrast": 38425, - "ĠEmir": 38426, - "ĠоÑĤпÑĢав": 38427, - "ĠKimchi": 38428, - "ĠFinnish": 38429, - "´ìłģ": 38430, - "inaire": 38431, - "Ġoike": 38432, - "æ¸ħæ¥ļ": 38433, - "Ġhostage": 38434, - "ĠButton": 38435, - "ÙĤÙĬ": 38436, - "eking": 38437, - "ĠKazakh": 38438, - "Ġcomforting": 38439, - "Ġsog": 38440, - "Ġgreeted": 38441, - "guitar": 38442, - "payer": 38443, - "Ġrelational": 38444, - "Ġconstruir": 38445, - "çī¹åĪ¥": 38446, - "opian": 38447, - "ĠVolume": 38448, - "ieth": 38449, - "ÑģÑĤвом": 38450, - "urrection": 38451, - "liÅĽmy": 38452, - "Ġhemisphere": 38453, - "ĠBean": 38454, - "IGN": 38455, - "Ġkötü": 38456, - "ĠFallout": 38457, - "Ġbrace": 38458, - "ç¹¼çºĮ": 38459, - "ÏĢά": 38460, - "ĠHAS": 38461, - "Ġgé": 38462, - "Ġcharacterize": 38463, - "ặc": 38464, - "ĠMilky": 38465, - "Ġtumors": 38466, - "Ġnuit": 38467, - "ĠGaz": 38468, - "ĠìŀĪëĭ¤ëĬĶ": 38469, - "ĠгаÑĢ": 38470, - "essment": 38471, - "ĠAbe": 38472, - "Ġë½ij": 38473, - "ĠEinsatz": 38474, - "JIN": 38475, - "jä": 38476, - "Cry": 38477, - "ĠPromised": 38478, - "ĠÑģеÑĢд": 38479, - "okus": 38480, - "Ġscalable": 38481, - "ĠпоÑģмоÑĤÑĢеÑĤÑĮ": 38482, - "ücklich": 38483, - "Ġrealism": 38484, - "Ġmayo": 38485, - "Ġjuvenile": 38486, - "Ġheadlights": 38487, - "ĠgörÃ¼ÅŁ": 38488, - "ĠReform": 38489, - "Ġhalves": 38490, - "czne": 38491, - "Ġbreakup": 38492, - "żej": 38493, - "Ġrätt": 38494, - "Day": 38495, - "ĠìĿ¼ë³¸": 38496, - "Ġmuerte": 38497, - "Ġtunes": 38498, - "ĠSmile": 38499, - "record": 38500, - "Ġrecherche": 38501, - "atisfied": 38502, - "Ġpozi": 38503, - "Ġcelebrations": 38504, - "isexual": 38505, - "ĠROB": 38506, - "thirds": 38507, - "ĠFortune": 38508, - "ĠÑĤой": 38509, - "Ġbranded": 38510, - "loo": 38511, - "Ġdud": 38512, - "Ġrandomized": 38513, - "Ġcombin": 38514, - "ä¸ĢäºĽ": 38515, - "ieran": 38516, - "czenia": 38517, - "įãĥ«": 38518, - "Ġcurator": 38519, - "Ġartery": 38520, - "ĠÑĥÑĪ": 38521, - "ĠÑĩиÑĤ": 38522, - "Ġsubsidies": 38523, - "Ġblossom": 38524, - "ĠTwilight": 38525, - "Ġhyvä": 38526, - "ĠPompe": 38527, - "ĠCisco": 38528, - "ĠÐŁÑĢо": 38529, - "Ġbiri": 38530, - "Ġgern": 38531, - "Ġrebuilt": 38532, - "Ġwcze": 38533, - "Ġbenefici": 38534, - "Ġdrummer": 38535, - "Ġsolids": 38536, - "Ġdiyorsun": 38537, - "ãģĤãĤĬãģĮãģ¨ãģĨãģĶãģĸãģĦãģ¾ãģĹãģŁ": 38538, - "lated": 38539, - "Ġmuddy": 38540, - "Ġholog": 38541, - "Ġclaps": 38542, - "ĠRings": 38543, - "ĠOkey": 38544, - "ĠBrave": 38545, - "Ġvaluation": 38546, - "Ġmigrant": 38547, - "Ġintermitt": 
38548, - "Ġeigene": 38549, - "iliary": 38550, - "ãĥ¼ãĥĪ": 38551, - "markt": 38552, - "kr": 38553, - "ĠRib": 38554, - "á»Ļi": 38555, - "Ġaccusations": 38556, - "Ġarab": 38557, - "wash": 38558, - "ĠBardzo": 38559, - "Ġugh": 38560, - "esters": 38561, - "ophren": 38562, - "Ġalimentos": 38563, - "ĠUz": 38564, - "ÖĤ": 38565, - "Ġ650": 38566, - "ĠпÑĢиеÑħ": 38567, - "FI": 38568, - "Ġsampai": 38569, - "Ġparlé": 38570, - "hesion": 38571, - "Ġsır": 38572, - "Ġapparatus": 38573, - "Ġcorrelated": 38574, - "ĠPrincipal": 38575, - "Ġcorr": 38576, - "ĠOfficial": 38577, - "иÑĩеÑģкие": 38578, - "Ġterminals": 38579, - "Should": 38580, - "Ġvacun": 38581, - "Ġstellt": 38582, - "Ġmooi": 38583, - "etzung": 38584, - "ĠкÑĢа": 38585, - "Ġdai": 38586, - "Ġпож": 38587, - "Team": 38588, - "ĠPPE": 38589, - "ĠÐŀÑģ": 38590, - "ĠLeah": 38591, - "ĠIvy": 38592, - "yst": 38593, - "Ġuhhh": 38594, - "Ġnighttime": 38595, - "Ġtrendy": 38596, - "Ġsecurities": 38597, - "Ġcontinents": 38598, - "Ġfirsthand": 38599, - "ĠVeron": 38600, - "ĠëĤ®": 38601, - "Ġbrowsing": 38602, - "ĠCada": 38603, - "tro": 38604, - "Ġtramp": 38605, - "reib": 38606, - "Ġerstmal": 38607, - "irler": 38608, - "Ġpsic": 38609, - "Ġgetir": 38610, - "ĠNP": 38611, - "Ġdzieci": 38612, - "обÑĢаз": 38613, - "Ġmagician": 38614, - "Ġscrutiny": 38615, - "Ġslab": 38616, - "ĠOT": 38617, - "isty": 38618, - "iries": 38619, - "orest": 38620, - "Ġtasked": 38621, - "Ġmorally": 38622, - "ìķ¼ì§Ģ": 38623, - "ustered": 38624, - "Ġfools": 38625, - "Ġirrespons": 38626, - "Ġeinf": 38627, - "Ġviá»ĩc": 38628, - "Ġscor": 38629, - "Ġpillows": 38630, - "ĠGegen": 38631, - "Ġtutte": 38632, - "Ġquarterly": 38633, - "Ġdidnt": 38634, - "ĠGym": 38635, - "ĠEther": 38636, - "ĠØ«": 38637, - "лиÑĪком": 38638, - "Ġsignaling": 38639, - "ĠNode": 38640, - "ĠDoncs": 38641, - "Ġyah": 38642, - "ĠKanal": 38643, - "Ġfading": 38644, - "etin": 38645, - "Ġinfluencers": 38646, - "Ġmedals": 38647, - "Ġengineered": 38648, - "Ġfermented": 38649, - "ê²łì§Ģë§Į": 38650, - "ĠBeethoven": 38651, - "×ŀש": 38652, - "inental": 38653, - "ĠìķĮ볤": 38654, - "ütfen": 38655, - "alnya": 38656, - "Ġovere": 38657, - "Ġdenkt": 38658, - "акÑĤеÑĢ": 38659, - "Ġâĺ": 38660, - "Ġnecesit": 38661, - "Ġgenerators": 38662, - "grass": 38663, - "ĠподÑĥм": 38664, - "lieÃŁen": 38665, - "Bar": 38666, - "ľëıĻ": 38667, - "ĠдеÑĤей": 38668, - "Ġsucking": 38669, - "Ġstencil": 38670, - "Ġprimo": 38671, - "ĠBreath": 38672, - "strom": 38673, - "Ġimmensely": 38674, - "Ġappreh": 38675, - "ìłķìĿ´": 38676, - "Pop": 38677, - "Ġjong": 38678, - "ĠGiul": 38679, - "ĠADHD": 38680, - "Ġhören": 38681, - "Ġelo": 38682, - "ivent": 38683, - "Ġrus": 38684, - "Ġoutrageous": 38685, - "Ġmastered": 38686, - "Ġ커": 38687, - "ÙĪÙģ": 38688, - "ipes": 38689, - "ĠRudy": 38690, - "Jacob": 38691, - "Ġbullish": 38692, - "Ġtapped": 38693, - "Ġfaud": 38694, - "izophren": 38695, - "ĠÑģоÑħ": 38696, - "ĠDarling": 38697, - "Ġ1963": 38698, - "ĠPrevention": 38699, - "²Ķ": 38700, - "Ġabdominal": 38701, - "stones": 38702, - "Ġavaient": 38703, - "á»ķi": 38704, - "make": 38705, - "Ġsare": 38706, - "ĠInstant": 38707, - "кам": 38708, - "Ġkeeper": 38709, - "Ġblankets": 38710, - "ãģ§ãģĹãĤĩãģĨ": 38711, - "Ġsweats": 38712, - "ĠMinneapolis": 38713, - "åħ¨éĥ¨": 38714, - "Ġgenommen": 38715, - "Ġfasten": 38716, - "ĠBrussels": 38717, - "åij¼": 38718, - "Ġcafeter": 38719, - "Ġabsorbing": 38720, - "Ġhago": 38721, - "ĠElmo": 38722, - "Ġgusto": 38723, - "ĠYap": 38724, - "Música": 38725, - "Ġtert": 38726, - "Ġbanda": 38727, - "Ġmily": 38728, - "Ġthereafter": 38729, - "ĠStockholm": 38730, - "ĠCarson": 38731, - 
"Ġcalibration": 38732, - "avaÅŁ": 38733, - "ansa": 38734, - "ikke": 38735, - "Ġforesee": 38736, - "Ġqualche": 38737, - "Ġdeste": 38738, - "æ¤": 38739, - "ünüz": 38740, - "Ġforge": 38741, - "Dis": 38742, - "esten": 38743, - "Ġδια": 38744, - "Ġencaps": 38745, - "ĠGespr": 38746, - "Ġchercher": 38747, - "ickets": 38748, - "ÑĤоÑĢÑĭ": 38749, - "Cr": 38750, - "ĠТакже": 38751, - "Ġrabbits": 38752, - "ĠDot": 38753, - "heiten": 38754, - "Ġcausal": 38755, - "ĠFoster": 38756, - "ajÄħc": 38757, - "Ġbereit": 38758, - "Ġayudar": 38759, - "é«Ļ": 38760, - "ãģ³": 38761, - "song": 38762, - "comb": 38763, - "Ġfringe": 38764, - "Ġcybersecurity": 38765, - "Ġ뾨": 38766, - "Ġkier": 38767, - "Ġbeschäft": 38768, - "ĠконÑĨе": 38769, - "Ġfacilit": 38770, - "ĠNamen": 38771, - "Ġbilateral": 38772, - "tx": 38773, - "ĠWissenschaft": 38774, - "Ġnuances": 38775, - "Ġripping": 38776, - "Ġfy": 38777, - "ĠSicherheit": 38778, - "ĠGhana": 38779, - "olon": 38780, - "Ġtopped": 38781, - "ĠMorocco": 38782, - "Ġradial": 38783, - "ĠLEE": 38784, - "ĠAndreas": 38785, - "edd": 38786, - "ĠìĹ´ë": 38787, - "ĠAirlines": 38788, - "ãģĵãĤį": 38789, - "Ġvalores": 38790, - "ê·ľ": 38791, - "Hy": 38792, - "ĠзадаÑĩ": 38793, - "ĠKendall": 38794, - "ĠÑħаÑĢ": 38795, - "ĠVamp": 38796, - "Ġpython": 38797, - "Ġmanageable": 38798, - "ĠGente": 38799, - "oise": 38800, - "iciary": 38801, - "Ġimposs": 38802, - "ĠBunny": 38803, - "iesta": 38804, - "Andrew": 38805, - "Ġsert": 38806, - "ĠCec": 38807, - "zzarella": 38808, - "Ġautomobile": 38809, - "ĠTiere": 38810, - "allows": 38811, - "åĨĨ": 38812, - "Ġë°Ģ": 38813, - "ĠScorp": 38814, - "ĠJelly": 38815, - "agara": 38816, - "ĠStretch": 38817, - "Ġredef": 38818, - "Ġexacerb": 38819, - "ĠSHA": 38820, - "éf": 38821, - "orsa": 38822, - "Ġflawed": 38823, - "ĠNoel": 38824, - "?!?": 38825, - "Ġprocent": 38826, - "Ġmenstru": 38827, - "ĠпÑĢоÑĩ": 38828, - "Ġinfants": 38829, - "ðŁİµ": 38830, - "pause": 38831, - "ĠRacing": 38832, - "Ġ1948": 38833, - "Ġsuperintendent": 38834, - "idores": 38835, - "idy": 38836, - "brahim": 38837, - "Ġunlucky": 38838, - "Ġperk": 38839, - "anci": 38840, - "Ġë§ĮëĤĺ": 38841, - "ĠÐľÐ¾Ñģкв": 38842, - "Ġfinans": 38843, - "Ġdiferencia": 38844, - "łĪìĿ´": 38845, - "éħį": 38846, - "ORY": 38847, - "ĠTac": 38848, - "ÛĮا": 38849, - "Ġdesem": 38850, - "Ġважно": 38851, - "ĠJU": 38852, - "ĠìŀĪìŀĸìķĦìļĶ": 38853, - "ĠÎĿ": 38854, - "Ġinformations": 38855, - "ĠHEL": 38856, - "hst": 38857, - "ĠпоговоÑĢ": 38858, - "Ġvoiture": 38859, - "Ġreus": 38860, - "ändig": 38861, - "ĠпоÑħож": 38862, - "jing": 38863, - "Ġdru": 38864, - "altra": 38865, - "Ġproduits": 38866, - "Ġkite": 38867, - "Ġeyeball": 38868, - "ĠBelt": 38869, - "ĠRestaurant": 38870, - "Ġgamb": 38871, - "Ġporridge": 38872, - "itters": 38873, - "Ġconverts": 38874, - "Ġyardım": 38875, - "Ġmáximo": 38876, - "wirtschaft": 38877, - "ĠíķĺëĤĺë": 38878, - "Ġì¤Ģ": 38879, - "Ġiceberg": 38880, - "Ġvorbei": 38881, - "Ġ256": 38882, - "ocratic": 38883, - "Ġreckless": 38884, - "onner": 38885, - "Ġmús": 38886, - "Ġlogically": 38887, - "ĠPrison": 38888, - "ĠNetz": 38889, - "Ġvacant": 38890, - "Ġnimmt": 38891, - "ĠHARR": 38892, - "Ġзов": 38893, - "ĠDee": 38894, - "ringe": 38895, - "niest": 38896, - "ĠRules": 38897, - "ìĬ¤ëŁ½": 38898, - "cussions": 38899, - "Ġfloral": 38900, - "Ġconstrained": 38901, - "Ġdifferentiation": 38902, - "ĠQuebec": 38903, - "ĠÛģÛĮÚº": 38904, - "Ġpública": 38905, - "itel": 38906, - "Ġaccommodations": 38907, - "ĠGrü": 38908, - "íľ": 38909, - "Ġpickles": 38910, - "иÑĩеÑģкиÑħ": 38911, - "Ġcommissions": 38912, - "ĠBaek": 38913, - "ĠçocuÄŁ": 38914, - "ĠMedium": 
38915, - "Ġperiodically": 38916, - "Ġwonderfully": 38917, - "Ġstaffing": 38918, - "ìĽIJë": 38919, - "rire": 38920, - "fle": 38921, - "ĠMcL": 38922, - "ĠÑĤеп": 38923, - "ĠпеÑĢек": 38924, - "нолог": 38925, - "Ġíģ¬ê²Į": 38926, - "çĻ¼çı¾": 38927, - "Ġprosperous": 38928, - "ĠSpiritual": 38929, - "ĠChick": 38930, - "DIA": 38931, - "ĠÐŁÑĢивеÑĤ": 38932, - "ĠperÃŃ": 38933, - "ÑĮÑİÑĤ": 38934, - "Ġconsultants": 38935, - "ĠEarl": 38936, - "ä»Ĭå¹´": 38937, - "Ġruining": 38938, - "оÑĢе": 38939, - "Ġpenser": 38940, - "Ġtakiej": 38941, - "Ġstrengthened": 38942, - "ĠLiquid": 38943, - "онеÑĨ": 38944, - "аваÑĤÑĮ": 38945, - "Ġcamer": 38946, - "Ġdisagreement": 38947, - "Ġbathing": 38948, - "ĠYosh": 38949, - "aal": 38950, - "prechen": 38951, - "RISADAS": 38952, - "Ġsuperstar": 38953, - "æģŃ": 38954, - "лÑıÑĤÑĮ": 38955, - "Ġnib": 38956, - "ĠTherm": 38957, - "ĠDANIEL": 38958, - "Ġpaw": 38959, - "Ġliquids": 38960, - "Ġcapacit": 38961, - "arken": 38962, - "Ġvagina": 38963, - "Ġmashed": 38964, - "Ġemerges": 38965, - "yscy": 38966, - "Ġunrelated": 38967, - "ĠGuild": 38968, - "Ġinverted": 38969, - "itives": 38970, - "Tra": 38971, - "Ġbegr": 38972, - "Ġalte": 38973, - "ì§ķ": 38974, - "ãĤģãģ¦": 38975, - "ĠÑĢазÑĢабоÑĤ": 38976, - "finder": 38977, - "Ġдалее": 38978, - "ĠблагодаÑĢ": 38979, - "walker": 38980, - "Ġcrater": 38981, - "assadors": 38982, - "rences": 38983, - "inski": 38984, - "ĠKIM": 38985, - "ĠElliot": 38986, - "2017": 38987, - "ĠSr": 38988, - "inka": 38989, - "anov": 38990, - "Ġìŀĺ못": 38991, - "Ġproprietary": 38992, - "displaystyle": 38993, - "ĠÑģим": 38994, - "Ġизб": 38995, - "ĠPanel": 38996, - "Ġinstincts": 38997, - "ĠCommunications": 38998, - "麻": 38999, - "midt": 39000, - "Ġë§Įëĵ¤ìĸ´": 39001, - "ĠÑģлова": 39002, - "ĠGilbert": 39003, - "缮åīį": 39004, - "Так": 39005, - "voorbeeld": 39006, - "еÑİÑģÑĮ": 39007, - "aryn": 39008, - "quez": 39009, - "Ġdart": 39010, - "ÑĸÑĪ": 39011, - "ĠHut": 39012, - "Sal": 39013, - "Ġsoutheast": 39014, - "Ġpesticides": 39015, - "Ġhelicopters": 39016, - "Ġendured": 39017, - "iada": 39018, - "Ġbrewing": 39019, - "ìŬë": 39020, - "ĠÑģвобод": 39021, - "ĠSaints": 39022, - "ĠFrançais": 39023, - "ĠEconomics": 39024, - "Ġdisloc": 39025, - "ophobia": 39026, - "Camer": 39027, - "Ġnegotiated": 39028, - "ĠÑģÑĤали": 39029, - "ìĬ¤íģ": 39030, - "ogie": 39031, - "Ġtsunami": 39032, - "Ġpeeled": 39033, - "Ġmotivations": 39034, - "è¨Ń": 39035, - "ostat": 39036, - "flan": 39037, - "ĠDAC": 39038, - "Ġkav": 39039, - "'RE": 39040, - "ĠPearson": 39041, - "bbe": 39042, - "czenie": 39043, - "Ġatenção": 39044, - "íĨµëł¹": 39045, - "ãģ£ãģ¡": 39046, - "ĠÑĥдаÑĢ": 39047, - "Ġintroductory": 39048, - "ĠIci": 39049, - "ëĮĢë": 39050, - "akat": 39051, - "Ġtrench": 39052, - "Ġproceeded": 39053, - "ĠCoin": 39054, - "Ġderecho": 39055, - "ĠRede": 39056, - "æ¯Ľ": 39057, - "аннÑĭй": 39058, - "Ġincarcerated": 39059, - "ĠRichmond": 39060, - "Rock": 39061, - "ĠPav": 39062, - "ĠKarma": 39063, - "uges": 39064, - "Ġconteú": 39065, - "ë¹Ħ": 39066, - "Ġê·¸ë§Į": 39067, - "ĠGone": 39068, - "ĠwspóÅĤ": 39069, - "ĠRahmen": 39070, - "unken": 39071, - "Ġì¤ijìļĶíķľ": 39072, - "Ġib": 39073, - "Ġattaching": 39074, - "Hay": 39075, - "Ġsuka": 39076, - "ìį¹": 39077, - "Ġpivotal": 39078, - "ĠRespect": 39079, - "ÃŃda": 39080, - "IB": 39081, - "ĠVerantwort": 39082, - "wiet": 39083, - "Ġforensic": 39084, - "ÑĢиÑģÑĤ": 39085, - "ĠпÑĢинÑĨипе": 39086, - "Ġmarkings": 39087, - "Ġkettle": 39088, - "ĠOpera": 39089, - "ĠDoctors": 39090, - "Ġshredded": 39091, - "Ġrecuer": 39092, - "Ġvigil": 39093, - "ĠFail": 39094, - "Ġentrev": 39095, - "ĠдÑĥÑĪ": 39096, - 
"Ġoutbreaks": 39097, - "èµ°åIJ§": 39098, - "ĠÏĢο": 39099, - "Ġrogue": 39100, - "angled": 39101, - "Ġyearly": 39102, - "ĠCreed": 39103, - "Ġwam": 39104, - "Ġlotus": 39105, - "ê³¼ë": 39106, - "ãĢģãĢģ": 39107, - "ĠSpit": 39108, - "ĠItu": 39109, - "Ġstrains": 39110, - "Ġstamped": 39111, - "Ġplaint": 39112, - "Ġpotion": 39113, - "Ġconsolidation": 39114, - "è©ķ": 39115, - "оÑĩкÑĥ": 39116, - "Ġvlogging": 39117, - "Ġslate": 39118, - "ĠAuft": 39119, - "ĠIncor": 39120, - "ừng": 39121, - "§IJ": 39122, - "enh": 39123, - "ĠheiÃŁ": 39124, - "Ġdomest": 39125, - "ĠStrom": 39126, - "åį³": 39127, - "akis": 39128, - "Ġfragen": 39129, - "Ġfiner": 39130, - "ĠSug": 39131, - "Ġuphill": 39132, - "Ġéén": 39133, - "âĢ¦)": 39134, - "ĠÑģоп": 39135, - "ĠCorey": 39136, - "Ġsiebie": 39137, - "Ġmuse": 39138, - "Ġcloves": 39139, - "Ġpous": 39140, - "ĠFinanz": 39141, - "ĠRoute": 39142, - "amat": 39143, - "Ġmutually": 39144, - "ĠвнÑĥÑĤÑĢи": 39145, - "ĠSelena": 39146, - "ëĶ": 39147, - "ĠGaussian": 39148, - "ë¶ĢíĦ°": 39149, - "Ġ×ij׼": 39150, - "Ġejerc": 39151, - "å¾®": 39152, - "kea": 39153, - "ĠGerry": 39154, - "ĠSic": 39155, - "大çļĦ": 39156, - "Ġ1966": 39157, - "iese": 39158, - "Ġfossils": 39159, - "Ġestad": 39160, - "ĠKane": 39161, - "ciÄĩ": 39162, - "ĠìľłíĬľë": 39163, - "Ġпам": 39164, - "ĠCruise": 39165, - "intérieur": 39166, - "Ġbekannt": 39167, - "ĠPode": 39168, - "Ġdemander": 39169, - "Rem": 39170, - "Ġinvade": 39171, - "Ġdecorating": 39172, - "ropic": 39173, - "Ġcowboy": 39174, - "ĠPhoto": 39175, - "opolit": 39176, - "Ġì»¬ëŁ¬ë": 39177, - "Ġreap": 39178, - "Ġhandwriting": 39179, - "à¹Ħร": 39180, - "Ġëļ": 39181, - "Ġبعد": 39182, - "ĠMt": 39183, - "ÙĢ": 39184, - "Ġspaceship": 39185, - "Ġnationalism": 39186, - "Ġcouncils": 39187, - "ĠGriffin": 39188, - "ĠAhmed": 39189, - "Ġclich": 39190, - "ĠOL": 39191, - "wl": 39192, - "ĠPilot": 39193, - "å®®": 39194, - "Ġacronym": 39195, - "Ġgels": 39196, - "Ġelectroly": 39197, - "èĵ": 39198, - "Ġмной": 39199, - "Ġepisod": 39200, - "ĠDieses": 39201, - "ĠATP": 39202, - "Ġediyorum": 39203, - "Ġexpresses": 39204, - "Ġexhibits": 39205, - "Comm": 39206, - "ĠкÑĢÑĥп": 39207, - "Ġmatar": 39208, - "Ġ2025": 39209, - "ĠArtem": 39210, - "vasive": 39211, - "rÃł": 39212, - "ĠbeÅŁ": 39213, - "é»ĥ": 39214, - "Ġlizard": 39215, - "Ġfille": 39216, - "Ġì§Ī문": 39217, - "ĠмоÑī": 39218, - "Ġtür": 39219, - "Ġculprit": 39220, - "Ġwoven": 39221, - "ĠANY": 39222, - "nim": 39223, - "Ġtay": 39224, - "Ġpromin": 39225, - "Ġacompa": 39226, - "Ġidé": 39227, - "Ġboiler": 39228, - "ĠThemen": 39229, - "Ġavenue": 39230, - "ĠMud": 39231, - "ĠновÑĭе": 39232, - "Ġwitnessing": 39233, - "Ġlance": 39234, - "ĠCHAN": 39235, - "ĠBever": 39236, - "تÙħ": 39237, - "Ġchemotherapy": 39238, - "King": 39239, - "ĠbÄĻdÄĻ": 39240, - "Ġatual": 39241, - "Ġtive": 39242, - "Ġtalkin": 39243, - "Ġquedar": 39244, - "ieÃŁ": 39245, - "edel": 39246, - "Ġìĸ´ìłľ": 39247, - "Ġjogar": 39248, - "Ġör": 39249, - "Ġundertaking": 39250, - "ĠStrength": 39251, - "Ġmilhões": 39252, - "ĠWine": 39253, - "ĠMolt": 39254, - "讲": 39255, - "ãģijãĤĮ": 39256, - "Ġundermine": 39257, - "ĠArchives": 39258, - "vana": 39259, - "mercial": 39260, - "MC": 39261, - "Ġcaste": 39262, - "пÑĢ": 39263, - "Ġlegislators": 39264, - "ulators": 39265, - "ênio": 39266, - "Ġëį°ë": 39267, - "ĠÑħоÑĤиÑĤе": 39268, - "Ġнек": 39269, - "Ġsurn": 39270, - "Ġconsci": 39271, - "ĠPOW": 39272, - "Ġculinary": 39273, - "ĠKAT": 39274, - "ĠFolks": 39275, - "Ñĭваем": 39276, - "Ġвок": 39277, - "ãģijãĤĭ": 39278, - "service": 39279, - "pts": 39280, - "Ġпобед": 39281, - "æĺ¯åķĬ": 39282, - "Ġtents": 39283, - 
"Ġnord": 39284, - "STE": 39285, - "Ġrepublican": 39286, - "Ġwyk": 39287, - "Ġminions": 39288, - "èĻķ": 39289, - "Ġmemang": 39290, - "jest": 39291, - "Ġcomparative": 39292, - "Ġtyle": 39293, - "carbon": 39294, - "bedingt": 39295, - "ksen": 39296, - "Ġnegativity": 39297, - "Ġsjälv": 39298, - "Ġdú": 39299, - "æīĢæľī": 39300, - "Ġrecalled": 39301, - "cra": 39302, - "ĠTada": 39303, - "ĠÑĢÑĥки": 39304, - "ĠопÑĢедел": 39305, - "Ġprocrast": 39306, - "Ġjogos": 39307, - "ĠOo": 39308, - "ĠHearts": 39309, - "Ġéch": 39310, - "ĠksiÄħż": 39311, - "Ġcoarse": 39312, - "ĠTube": 39313, - "ĠGreens": 39314, - "Ġén": 39315, - "Ġdumbbell": 39316, - "ĠÑĤи": 39317, - "Ġquerer": 39318, - "اØŃ": 39319, - "Ïĥει": 39320, - "ĠпÑĢавилÑĮно": 39321, - "Ġпап": 39322, - "Ġcompra": 39323, - "Ġtér": 39324, - "ĠAntes": 39325, - "Ġoptimum": 39326, - "Ġbiscuit": 39327, - "κι": 39328, - "aczego": 39329, - "Ġìĭľê°ĦìĿ´": 39330, - "ĠMarines": 39331, - "vero": 39332, - "Ġvaccinations": 39333, - "Ġpetty": 39334, - "riters": 39335, - "Ġал": 39336, - "country": 39337, - "Ġcounters": 39338, - "Ġattendant": 39339, - "ĠHui": 39340, - "ãģ¨ãģĦãģĨãģĵãģ¨ãģ§": 39341, - "cka": 39342, - "ÑģÑĤвеннÑĭй": 39343, - "guy": 39344, - "Ġtricked": 39345, - "ĠRED": 39346, - "Ġthrilling": 39347, - "ÏĢοι": 39348, - "Ġpiggy": 39349, - "Ġanunci": 39350, - "ORTER": 39351, - "ĠValue": 39352, - "Ġrond": 39353, - "ĠADA": 39354, - "Ġposer": 39355, - "hores": 39356, - "ĠRoland": 39357, - "ĵ¯": 39358, - "Ġnoir": 39359, - "Ġש×IJ×": 39360, - "ë°ľ": 39361, - "iemand": 39362, - "ĠпоÑĤеÑĢ": 39363, - "ê³³": 39364, - "Ġê±±": 39365, - "Ġformatting": 39366, - "ĠLed": 39367, - "è§Ģçľ¾": 39368, - "Ġkillers": 39369, - "ĠÄijấy": 39370, - "Ġhaar": 39371, - "again": 39372, - "!>[": 45687, - "minster": 45688, - "Ġвли": 45689, - "Ġidentifier": 45690, - "ĠLambda": 45691, - "Ġtros": 45692, - "Ġflawless": 45693, - "Ġdetrimental": 45694, - "Ġbunları": 45695, - "War": 45696, - "Ġregião": 45697, - "羣çļĦæĺ¯": 45698, - "ĠBike": 45699, - "cessors": 45700, - "Ġcùng": 45701, - "ĠRN": 45702, - "Ġê½ĥ": 45703, - "Ġküçük": 45704, - "ĠBeginning": 45705, - "íĺ¸ë": 45706, - "Ġgewe": 45707, - "Ġdenote": 45708, - "ĠAlberto": 45709, - "Ġprobiot": 45710, - "Ġode": 45711, - "Ġmolar": 45712, - "Ġbursting": 45713, - "assumed": 45714, - "Ġfootprints": 45715, - "veda": 45716, - "Ġsteroids": 45717, - "Ġflaming": 45718, - "ĠEller": 45719, - "Ġerkennen": 45720, - "ätzen": 45721, - "Ġlifecycle": 45722, - "ĠDOU": 45723, - "ĠKarena": 45724, - "ĠGuerra": 45725, - "è¿ĺæĺ¯": 45726, - "Ġsinister": 45727, - "Ġpodéis": 45728, - "Ġparab": 45729, - "Ġoko": 45730, - "Ġmatéri": 45731, - "Ġcaric": 45732, - "sonaro": 45733, - "Ġpraticamente": 45734, - "ÑĥÑģа": 45735, - "Ġcomunque": 45736, - "Ġvigilant": 45737, - "Ġregimes": 45738, - "ĠShooting": 45739, - "Ġraids": 45740, - "ĠNora": 45741, - "ĠWieder": 45742, - "mens": 45743, - "ĠÑģод": 45744, - "Ġê²½ìļ°ìĹIJëĬĶ": 45745, - "ĠвÑħод": 45746, - "Ġautobi": 45747, - "ĠSchn": 45748, - "ĠRobbie": 45749, - "ĠFitness": 45750, - "ĠконÑĦ": 45751, - "Ġpenguin": 45752, - "моÑĤÑĢÑı": 45753, - "Ġминим": 45754, - "plays": 45755, - "Ġdelegates": 45756, - "Mer": 45757, - "Ġsistem": 45758, - "ĠMichaels": 45759, - "male": 45760, - "اع": 45761, - "Ġcách": 45762, - "ĠHä": 45763, - "Ġ×Ļ×ķ×ĵ×¢": 45764, - "Ġsuperpower": 45765, - "Ġstron": 45766, - "Ġrover": 45767, - "Ġdépend": 45768, - "éĻ³": 45769, - "Ġretiring": 45770, - "Ġvampires": 45771, - "Ġmerde": 45772, - "ĠChanging": 45773, - "Ġtame": 45774, - "Ġspokesperson": 45775, - "Ġcay": 45776, - "Ġflirting": 45777, - "ĠGrö": 45778, - "Ġwär": 45779, - 
"Ġwyb": 45780, - "Ġcoeur": 45781, - "ạnh": 45782, - "ĠìĻĢìĦľ": 45783, - "Ġconnais": 45784, - "ĠHundreds": 45785, - "ĠBea": 45786, - "ĠαÏĢ": 45787, - "pruch": 45788, - "Ġsociedade": 45789, - "ĠWhilst": 45790, - "ĠKait": 45791, - "espace": 45792, - "Ġchia": 45793, - "ĠErm": 45794, - "Ġë°Ķê¿": 45795, - "Ġfences": 45796, - "ĠMortal": 45797, - "ê²ģ": 45798, - "ĠгÑĢаÑĦ": 45799, - "ĠHomeland": 45800, - "ĠJUN": 45801, - "isst": 45802, - "Ġparlar": 45803, - "Ġsporty": 45804, - "éo": 45805, - "Ġdeepen": 45806, - "ĠBehavior": 45807, - "éĢı": 45808, - "åĵĪåĵĪåĵĪ": 45809, - "Ġerrand": 45810, - "Ġrotary": 45811, - "ĠWellington": 45812, - "Wind": 45813, - "Ġmesela": 45814, - "ảng": 45815, - "iende": 45816, - "Ġexcell": 45817, - "ĠGenius": 45818, - "ĠEduardo": 45819, - "æľī人": 45820, - "ĠÅŁunu": 45821, - "ĠÄ°stanbul": 45822, - "Ġproduto": 45823, - "Ġãħİãħİ": 45824, - "OFF": 45825, - "Ġwollt": 45826, - "çĪĨ": 45827, - "Ġëī´ìĬ¤": 45828, - "Ġlass": 45829, - "Ġhertz": 45830, - "Ġaromatic": 45831, - "Ġзвон": 45832, - "Ġautoc": 45833, - "ĠLust": 45834, - "Ġ112": 45835, - "ĠÎĹ": 45836, - "Ġreviewers": 45837, - "Ġreceptive": 45838, - "å°įäºĨ": 45839, - "ând": 45840, - "oglo": 45841, - "ĠìķĦëĭĻ": 45842, - "Ġngo": 45843, - "ÑĸÑĤи": 45844, - "Ã¥t": 45845, - "cono": 45846, - "Ġtekrar": 45847, - "Ġì£¼ê³ł": 45848, - "ĠgelmiÅŁ": 45849, - "Ġbedtime": 45850, - "ĠArgh": 45851, - "ADA": 45852, - "ĠгоÑĢода": 45853, - "ĠÄĩ": 45854, - "Ġalliances": 45855, - "giggling": 45856, - "Ġyerde": 45857, - "Ġspies": 45858, - "Ġgutes": 45859, - "çi": 45860, - "Ġalltid": 45861, - "ĠLah": 45862, - "ŀIJë": 45863, - "ĠdokÅĤad": 45864, - "ÙĪÙĬ": 45865, - "Ġtoxicity": 45866, - "Ġcancellation": 45867, - "Ġ1958": 45868, - "dro": 45869, - "ĠìŀijìĿĢ": 45870, - "ĠMotorola": 45871, - "Ġmultin": 45872, - "Ġenthusiasts": 45873, - "ĠMighty": 45874, - "ĠCoconut": 45875, - ":ãĢĮ": 45876, - "ĠPictures": 45877, - "Ġsangre": 45878, - "Ġblinking": 45879, - "olesome": 45880, - "ĠìĬ¤íĥĢìĿ¼": 45881, - "FP": 45882, - "Ġbooming": 45883, - "ĠдеÑģÑıÑĤ": 45884, - "Ġratchet": 45885, - "Ġtimelines": 45886, - "leness": 45887, - "Ġcages": 45888, - "ĠGoodnight": 45889, - "ometimes": 45890, - "Ġcunning": 45891, - "ĠRisk": 45892, - "uled": 45893, - "dade": 45894, - "Ġprata": 45895, - "ĠgustarÃŃa": 45896, - "amus": 45897, - "ĠJinping": 45898, - "Ġestrut": 45899, - "Ġdescobrir": 45900, - "ĠMÄģ": 45901, - "ĠAllan": 45902, - "ĠåĪĨ": 45903, - "Ġ׾ק": 45904, - "Ġpreserv": 45905, - "ĠStrawberry": 45906, - "Äı": 45907, - "Lu": 45908, - "Ġkro": 45909, - "ĠReports": 45910, - "ìħĶìķ¼": 45911, - "Ġvalt": 45912, - "Ġpouvait": 45913, - "Ġappar": 45914, - "ĠBone": 45915, - "Ġpreferably": 45916, - "ĠRepública": 45917, - "å°±åĪ°": 45918, - "Ġherzlich": 45919, - "Ġchimney": 45920, - "Ġçev": 45921, - "Ġvisas": 45922, - "Ġverr": 45923, - "Ġcultivation": 45924, - "ĠArmenia": 45925, - "ĠвдÑĢÑĥг": 45926, - "Ġcockro": 45927, - "retched": 45928, - "artz": 45929, - "ĠлÑİдÑıм": 45930, - "ĠpolÃŃticas": 45931, - "ĠPanz": 45932, - "ĠAKA": 45933, - "ĠëĪĮ룬": 45934, - "Ġerro": 45935, - "Ġcamper": 45936, - "Ġ102": 45937, - "स": 45938, - "done": 45939, - "Ġhoard": 45940, - "ĠÐŁÐ¾ÑĤом": 45941, - "jeong": 45942, - "Ġdesta": 45943, - "pak": 45944, - "Ġinim": 45945, - "Ġgrowers": 45946, - "ĠMessage": 45947, - "Ġelector": 45948, - "engage": 45949, - "ĠForbes": 45950, - "ĠCincinnati": 45951, - "Ġdifférence": 45952, - "df": 45953, - "Ġspar": 45954, - "Ġawaits": 45955, - "ĠUSSR": 45956, - "ĠRising": 45957, - "ĠHoÅŁ": 45958, - "Ġfooting": 45959, - "Ġcondiciones": 45960, - "ÑĤоÑĢов": 45961, - "Ġclinician": 45962, - 
"ĠDiskuss": 45963, - "å£ĵ": 45964, - "ר×Ĵ": 45965, - "×¥": 45966, - "iteit": 45967, - "gren": 45968, - "Ġcharisma": 45969, - "Ġleuke": 45970, - "Ġirritating": 45971, - "Ġcirca": 45972, - "ĠRhodes": 45973, - "Ġpior": 45974, - "Ġhandicap": 45975, - "royable": 45976, - "Ġvull": 45977, - "OG": 45978, - "ĠinÃŃcio": 45979, - "ieri": 45980, - "Ġsplashing": 45981, - "Ġdemise": 45982, - "Ġassistir": 45983, - "ÑĩÑĤо": 45984, - "Ġcovert": 45985, - "ĠGud": 45986, - "à¸ī": 45987, - "klär": 45988, - "ĠìŀIJ꾸": 45989, - "Ġverändert": 45990, - "ĠREM": 45991, - "ĠConven": 45992, - "atge": 45993, - "Ġpierwsze": 45994, - "Ġclergy": 45995, - "lington": 45996, - "liv": 45997, - "VPN": 45998, - "ĠÑģожал": 45999, - "ĠHate": 46000, - "ãģ¨ãģĵãĤį": 46001, - "ÏĨο": 46002, - "ĠRespons": 46003, - "озд": 46004, - "Ġetmek": 46005, - "Ġchemin": 46006, - "ÙħØ©": 46007, - "Ġê°Ģ족": 46008, - "Tre": 46009, - "Ġumas": 46010, - "ĠBurton": 46011, - "Ġpatriarch": 46012, - "ĠSmithsonian": 46013, - "¥ĺ": 46014, - "Moon": 46015, - "Air": 46016, - "Ġmedios": 46017, - "Ġeraser": 46018, - "Ġwollten": 46019, - "Ġpareil": 46020, - "ĠBillie": 46021, - "æĬ½": 46022, - "еÑĢÑĤв": 46023, - "Ġparlament": 46024, - "Ġagony": 46025, - "ĠQUE": 46026, - "sequently": 46027, - "Another": 46028, - "ĠWhew": 46029, - "ĠAnnual": 46030, - "Ġseben": 46031, - "ìĥģìĿĦ": 46032, - "values": 46033, - "ŀľë§Į": 46034, - "Ġsinon": 46035, - "ereal": 46036, - "ĠEnlight": 46037, - "ĠChemistry": 46038, - "ĠCatalunya": 46039, - "Ġdoctr": 46040, - "anton": 46041, - "Ġstuk": 46042, - "ĠPlate": 46043, - "ĠKardashian": 46044, - "Ġfilos": 46045, - "ĠWet": 46046, - "ĠпопÑĭÑĤ": 46047, - "Ġunknowns": 46048, - "ĠSchon": 46049, - "ĠBaldwin": 46050, - "Ġtelescopes": 46051, - "ĠGucci": 46052, - "oxide": 46053, - "ĠConservative": 46054, - "ìĦ±ìĿĦ": 46055, - "Ġhinaus": 46056, - "Power": 46057, - "Ġê±´ê°ķ": 46058, - "Ġprevail": 46059, - "orman": 46060, - "machine": 46061, - "Ġ1946": 46062, - "Ġunbel": 46063, - "Ġschaut": 46064, - "Ġpiel": 46065, - "eenth": 46066, - "Ġobjectively": 46067, - "Ġchakra": 46068, - "audio": 46069, - "Ġchicos": 46070, - "ĠVault": 46071, - "å°Ī": 46072, - "Ġmedicinal": 46073, - "ĠTail": 46074, - "While": 46075, - "Ġasphalt": 46076, - "Ġfroze": 46077, - "ĠEK": 46078, - "unching": 46079, - "nosis": 46080, - "2015": 46081, - "ĠGri": 46082, - "Ġoddly": 46083, - "ĠMär": 46084, - "ĠAeg": 46085, - "colo": 46086, - "Par": 46087, - "Ġëĵ¤ìĸ´ë": 46088, - "Ġvinden": 46089, - "ĠOVER": 46090, - "Ġiced": 46091, - "Ġscorp": 46092, - "Ġhac": 46093, - "qualified": 46094, - "ĠÑĥвидеÑĤÑĮ": 46095, - "ermo": 46096, - "HEN": 46097, - "Ġsoi": 46098, - "Ġmultiples": 46099, - "Ġlayouts": 46100, - "Ġblindness": 46101, - "ĠBowser": 46102, - "ĠподÑĤ": 46103, - "ĠÃİ": 46104, - "ventional": 46105, - "Ġmata": 46106, - "madı": 46107, - "Ġgeez": 46108, - "Ġcadence": 46109, - "Ġważne": 46110, - "ĠChristie": 46111, - "venge": 46112, - "Call": 46113, - "Ġturnaround": 46114, - "Ġblob": 46115, - "ĠЯк": 46116, - "ĠVoiceover": 46117, - "Ġperil": 46118, - "ĠJaime": 46119, - "ĠHOY": 46120, - "lane": 46121, - "Ġsebel": 46122, - "ĠDuo": 46123, - "ĠHistorical": 46124, - "Ġdni": 46125, - "Ġgema": 46126, - "yk": 46127, - "Ġsabem": 46128, - "ắng": 46129, - "Ġvars": 46130, - "ĠRonnie": 46131, - "ĠRonaldo": 46132, - "ĠPerquè": 46133, - "nsinn": 46134, - "hair": 46135, - "Ġrelentless": 46136, - "Ġlyn": 46137, - "Ġtraveler": 46138, - "æĢİ麼äºĨ": 46139, - "nine": 46140, - "Ġantim": 46141, - "Ġì¼Ģ": 46142, - "Ġsnowball": 46143, - "ĠÑħаÑĢакÑĤеÑĢ": 46144, - "Ġinterns": 46145, - "Ġconstituency": 46146, - "ĠÐĿам": 
46147, - "׾׾": 46148, - "VEL": 46149, - "Ġviktigt": 46150, - "Ġapoyo": 46151, - "ÙĦب": 46152, - "Ġjard": 46153, - "Ġheightened": 46154, - "ÑĢоÑģÑĤ": 46155, - "ĠSMITH": 46156, - "Ġдела": 46157, - "Ġrepairing": 46158, - "Ġrigt": 46159, - "ĠSheikh": 46160, - "ĠBritney": 46161, - "Ġeverytime": 46162, - "Ġadventurous": 46163, - "ockey": 46164, - "ernt": 46165, - "Ġataque": 46166, - "ĠAlternatively": 46167, - "effect": 46168, - "Ġpalavras": 46169, - "ĠElliott": 46170, - "Ġréussi": 46171, - "Ġhypertension": 46172, - "ĠManual": 46173, - "Ġprophetic": 46174, - "Ġhandc": 46175, - "ÑĮе": 46176, - "Ġrefrain": 46177, - "ĠSquid": 46178, - "ìŀ¡": 46179, - "Ġкоман": 46180, - "ällen": 46181, - "Ġllegó": 46182, - "Ġbash": 46183, - "iony": 46184, - "ĠÑģклад": 46185, - "Ġкаб": 46186, - "Ġcareless": 46187, - "ĠPool": 46188, - "Ġtrás": 46189, - "Ġfils": 46190, - "ĠSchr": 46191, - "Ġsprawd": 46192, - "ĠMonaten": 46193, - "Ġunforgettable": 46194, - "ĠCotton": 46195, - "Ġinconvenient": 46196, - "ĠRX": 46197, - "oris": 46198, - "Ġhumbled": 46199, - "ת×Ĺ": 46200, - "Ġآپ": 46201, - "ĠincreÃŃ": 46202, - "ĠKommentare": 46203, - "èĪĴ": 46204, - "ración": 46205, - "Ġvantage": 46206, - "ĠSeal": 46207, - "ĠìĿ´ê±°ë¥¼": 46208, - "Ġjoue": 46209, - "ãģĿãģĨãģ§ãģĻãģŃ": 46210, - "Ġìĺ¤ëŀĺ": 46211, - "ĠиÑģпÑĭÑĤ": 46212, - "oben": 46213, - "Ġgrate": 46214, - "Ġcontrole": 46215, - "ĠPercy": 46216, - "ÅĤada": 46217, - "Ġsimultaneous": 46218, - "Ġprototy": 46219, - "ĠgroÃŁer": 46220, - "Ġbewusst": 46221, - "inizi": 46222, - "Ġpassieren": 46223, - "ĠHappiness": 46224, - "åīĩ": 46225, - "shi": 46226, - "geht": 46227, - "Ġstationed": 46228, - "ĠErgebnis": 46229, - "Ġdirectamente": 46230, - "Ġsurvives": 46231, - "Ġpersones": 46232, - "BERG": 46233, - "Ġvomiting": 46234, - "Ġconhecer": 46235, - "Ġadjour": 46236, - "ĠCivic": 46237, - "pei": 46238, - "burst": 46239, - "Ġëĭ¤ëĭĪ": 46240, - "éı": 46241, - "Ġsled": 46242, - "Ġplataforma": 46243, - "ĠSect": 46244, - "ĠDefin": 46245, - "çĻ»éĮ²": 46246, - "énom": 46247, - "chnet": 46248, - "Ġprofitability": 46249, - "Ġerreicht": 46250, - "á»ıi": 46251, - "cation": 46252, - "Ġì§Ģê¸": 46253, - "Ġperdre": 46254, - "Ġfelony": 46255, - "Ġ1957": 46256, - "æĪijå¾Ī": 46257, - "Ġunsuccessful": 46258, - "Ġnagyon": 46259, - "Ġelasticity": 46260, - "Ġfacade": 46261, - "Ġearthly": 46262, - "ĠамеÑĢикан": 46263, - "Ġconn": 46264, - "cla": 46265, - "Du": 46266, - "Ġpolitiques": 46267, - "Ġhalo": 46268, - "iantes": 46269, - "Ġмоей": 46270, - "ãĥ³ãĥī": 46271, - "tones": 46272, - "elier": 46273, - "è®ļ": 46274, - "htaking": 46275, - "Ġwichtige": 46276, - "Ġanno": 46277, - "ĠLok": 46278, - "illions": 46279, - "Ġviver": 46280, - "Ġsolchen": 46281, - "Ġsuf": 46282, - "ĠSalz": 46283, - "ĠNvidia": 46284, - "zuge": 46285, - "ĠSpike": 46286, - "Video": 46287, - "Ġtwor": 46288, - "ĠAla": 46289, - "èijī": 46290, - "Ġhanya": 46291, - "ĠAdm": 46292, - "ìĿµ": 46293, - "ĠPatienten": 46294, - "ĠOnion": 46295, - "ĠKobe": 46296, - "ĠScene": 46297, - "ĠRash": 46298, - "æ¨Ļ": 46299, - "ÑĢаÑģÑĤ": 46300, - "istani": 46301, - "General": 46302, - "leye": 46303, - "imbap": 46304, - "Ġconcealed": 46305, - "ĠFridays": 46306, - "ĠWool": 46307, - "ĠновÑĭÑħ": 46308, - "شر": 46309, - "Ġê²°ê³¼": 46310, - "Ġjedoch": 46311, - "´ìĭľ": 46312, - "ĵ¤ëıĦ": 46313, - "Ġìŀ¥ëĤľ": 46314, - "ukt": 46315, - "Lou": 46316, - "Ġ먹ìĸ´": 46317, - "ĠExpect": 46318, - "Ġдомой": 46319, - "Ġirresponsible": 46320, - "Ġacerca": 46321, - "ĠZust": 46322, - "ר×ĺ": 46323, - "UI": 46324, - "Ġyoutubers": 46325, - "ĠPositive": 46326, - "Ġsocioe": 46327, - "Ġsnatch": 46328, - 
"èĥĮ": 46329, - "Ġrefreshed": 46330, - "Ġnominations": 46331, - "ĠPatt": 46332, - "Ġobsolete": 46333, - "ĠdemiÅŁ": 46334, - "åı¤": 46335, - "ormuÅŁ": 46336, - "ĠìĨĶì§ģíŀĪ": 46337, - "Ġfla": 46338, - "Ġcraziest": 46339, - "ĠZie": 46340, - "ĠTú": 46341, - "zep": 46342, - "icem": 46343, - "Ġë©ĭìŀĪ": 46344, - "Ġcynical": 46345, - "ãģĿãĤĵãģª": 46346, - "Ġtresp": 46347, - "Ġcraz": 46348, - "Õ¥Õ": 46349, - "Ġnelle": 46350, - "Ġmph": 46351, - "ĠNered": 46352, - "ĠKob": 46353, - "ĠEck": 46354, - "¨¸ëĭĪ": 46355, - "Jan": 46356, - "ĠТогда": 46357, - "Ġdeci": 46358, - "ĠVog": 46359, - "Ġbubbling": 46360, - "éĢĢ": 46361, - "úa": 46362, - "Ġproductos": 46363, - "iberal": 46364, - "Ġreplicated": 46365, - "ĠImprove": 46366, - "illary": 46367, - "Cha": 46368, - "Ġrédu": 46369, - "ĥIJíķĺë©´": 46370, - "Ġconnot": 46371, - "ĠKrit": 46372, - "ĠдÑĥÑħов": 46373, - "Ġtreadmill": 46374, - "ĠPW": 46375, - "ĠзовÑĥÑĤ": 46376, - "Ġclams": 46377, - "Ġdrafting": 46378, - "Ġ1956": 46379, - "unta": 46380, - "Ġexpenditures": 46381, - "ĠHoover": 46382, - "WOO": 46383, - "ÑĪее": 46384, - "Ġdeduction": 46385, - "monary": 46386, - "Ġrecib": 46387, - "Ġpovo": 46388, - "ĠëįĶë": 46389, - "ĠPAL": 46390, - "ĠBlow": 46391, - "Ġwyp": 46392, - "Ġdestac": 46393, - "deal": 46394, - "Graeme": 46395, - "Ġnécessaire": 46396, - "Ġdamned": 46397, - "Ġ1938": 46398, - "Ġìĭ¤ìłľë¡ľ": 46399, - "Ġtroop": 46400, - "Ġinsightful": 46401, - "ĠTJ": 46402, - "ĠоÑģв": 46403, - "Ġfidelity": 46404, - "ĠSkip": 46405, - "ĠMayo": 46406, - "ë§Ŀ": 46407, - "appe": 46408, - "Ġblas": 46409, - "ĠWY": 46410, - "ĠGN": 46411, - "ctar": 46412, - "Su": 46413, - "Ġcuent": 46414, - "hews": 46415, - "Ġcorpses": 46416, - "Abs": 46417, - "Ġwastewater": 46418, - "Ġciek": 46419, - "ĠOnu": 46420, - "Ġexplosives": 46421, - "Ġarma": 46422, - "ĠSTEPHAN": 46423, - "politik": 46424, - "ĠOsaka": 46425, - "taÅĤ": 46426, - "Ġyapıyor": 46427, - "Ġizquier": 46428, - "Ġbeleza": 46429, - "ĠWyatt": 46430, - "åIJ¸": 46431, - "Ġsuk": 46432, - "Ġspecjal": 46433, - "Ġdanke": 46434, - "whistle": 46435, - "ĠfÃŃsica": 46436, - "ĠHarriet": 46437, - "ĠìķĦíĮĮ": 46438, - "Ġwillkommen": 46439, - "iping": 46440, - "ĠÑģмоÑĤÑĢиÑĤе": 46441, - "ĠможеÑĪÑĮ": 46442, - "Ġinaccurate": 46443, - "Ġarrogance": 46444, - "ĠRemo": 46445, - "γά": 46446, - "assed": 46447, - "Ġdeliveries": 46448, - "Ġstinky": 46449, - "ĠпеÑĢеж": 46450, - "jay": 46451, - "Ġtransitional": 46452, - "Ġrere": 46453, - "ĠNGOs": 46454, - "ĠATM": 46455, - "خت": 46456, - "iology": 46457, - "Ġвлад": 46458, - "Ġschme": 46459, - "ĠShine": 46460, - "ìķ¡": 46461, - "pants": 46462, - "Ġserge": 46463, - "Ġsenhor": 46464, - "Ġabduct": 46465, - "ĠBryant": 46466, - "VES": 46467, - "Ġawakened": 46468, - "ĠLaz": 46469, - "ropolis": 46470, - "ĠLao": 46471, - "è¾Ľèĭ¦": 46472, - "Ġvilla": 46473, - "Ġsummers": 46474, - "Ġenthal": 46475, - "Ġ1949": 46476, - "Via": 46477, - "Ġìĸ´ì¨": 46478, - "Ġtendon": 46479, - "Ġviolet": 46480, - "Ġintellectually": 46481, - "Ġbounced": 46482, - "araus": 46483, - "Ġ1919": 46484, - "Ġvraag": 46485, - "Ġspel": 46486, - "ĠSchwar": 46487, - "Scott": 46488, - "ĠIndo": 46489, - "Ġë§Ŀ": 46490, - "Ġcanonical": 46491, - "ĠIKE": 46492, - "ĠthatÃŃs": 46493, - "Ġmellan": 46494, - "æ¯Ĵ": 46495, - "igmat": 46496, - "Could": 46497, - "...?)": 46498, - "Ġfoarte": 46499, - "ĠKumar": 46500, - "rendo": 46501, - "Ġélé": 46502, - "à´": 46503, - "valuation": 46504, - "cases": 46505, - "Ġintuitively": 46506, - "hong": 46507, - "etted": 46508, - "Ġsouven": 46509, - "Ġmorb": 46510, - "Ġcors": 46511, - "ĠNV": 46512, - "ĠHasan": 46513, - "æĥħåĨµ": 46514, 
- "ieved": 46515, - "Ġì§Ģê¸ĪìĿĢ": 46516, - "Ġdumpling": 46517, - "Ġcontrôle": 46518, - "Ġambiguity": 46519, - "æ©Łæľĥ": 46520, - "Ġcog": 46521, - "ĠScriptures": 46522, - "Ġcai": 46523, - "Ġbever": 46524, - "大家éĥ½": 46525, - "Ġhuis": 46526, - "Ġaime": 46527, - "Ġerklären": 46528, - "ĠLM": 46529, - "ĠFey": 46530, - "éļ¾": 46531, - "றத": 46532, - "Ġsupervised": 46533, - "Ġjewe": 46534, - "spl": 46535, - "ĠÑĨенÑĤÑĢ": 46536, - "Ġcollisions": 46537, - "ÙĦÙģ": 46538, - "ĠHogwarts": 46539, - "ĠDurham": 46540, - "×ķ×£": 46541, - "Ġphosphate": 46542, - "Ġoversee": 46543, - "Ġinspections": 46544, - "Ġbrinc": 46545, - "ĠZak": 46546, - "Ġpayoff": 46547, - "Ġchaud": 46548, - "ĠHunger": 46549, - "ãos": 46550, - "vir": 46551, - "Ġfiance": 46552, - "Ġboug": 46553, - "lived": 46554, - "cry": 46555, - "åĽŀä¾Ĩ": 46556, - "Ġjointly": 46557, - "Ġgirlfriends": 46558, - "ĠNexus": 46559, - "¦¬ê²łìĬµëĭĪëĭ¤": 46560, - "ĠKwang": 46561, - "åĵĪåĽī": 46562, - "å§ij": 46563, - "ÅĤÄĻ": 46564, - "ĠNeden": 46565, - "iece": 46566, - "Ġinserting": 46567, - "æŁĵ": 46568, - "ĠMummy": 46569, - "ĠGlobe": 46570, - "Ġlee": 46571, - "Ġgerman": 46572, - "Ġcreams": 46573, - "acho": 46574, - "ĠchÆ°a": 46575, - "ĠGalile": 46576, - "Ġfürs": 46577, - "Ġestiver": 46578, - "cidos": 46579, - "Christian": 46580, - "Ġlorsqu": 46581, - "Ġcutest": 46582, - "vale": 46583, - "ĠкÑĢеп": 46584, - "Ġwary": 46585, - "Ġslicing": 46586, - "Ġesperando": 46587, - "ĠVander": 46588, - "ĠDeixa": 46589, - "Ġ1954": 46590, - "ĠmówiÄħ": 46591, - "ÑĸÑĶ": 46592, - "Ġtooling": 46593, - "Ġrestor": 46594, - "Ġposición": 46595, - "Ġintentar": 46596, - "ĠApache": 46597, - "OUL": 46598, - "ĠÙĪب": 46599, - "Ġmatière": 46600, - "ãĥ¼ãĤĵ": 46601, - "Ġlinen": 46602, - "Ġestratég": 46603, - "ĠMutta": 46604, - "顯": 46605, - "è¡ĮäºĨ": 46606, - "Ġparting": 46607, - "Ġminimizing": 46608, - "Ġapprendre": 46609, - "æľĿ": 46610, - "Ġанглий": 46611, - "ĠDoo": 46612, - "ĠFirefox": 46613, - "cómo": 46614, - "Ġgeopolit": 46615, - "Ġmakan": 46616, - "Ġmogelijk": 46617, - "ĠÏĢεÏģι": 46618, - "Ġcứ": 46619, - "Ġinstaller": 46620, - "Ġdibuj": 46621, - "ĠHeath": 46622, - "loop": 46623, - "ĠBroken": 46624, - "HYUN": 46625, - "shelf": 46626, - "Ġfizer": 46627, - "Ġenhances": 46628, - "ä¾ĭãģĪãģ°": 46629, - "ĠдоÑģÑĤи": 46630, - "ĠPUB": 46631, - "ĠKollegin": 46632, - "Ġattained": 46633, - "ľ": 46634, - "Ġmistress": 46635, - "ĠOftentimes": 46636, - "×ŀ×Ļ×Ŀ": 46637, - "Ġbewe": 46638, - "ĠSora": 46639, - "rauen": 46640, - "baum": 46641, - "Ġrollers": 46642, - "Ġmering": 46643, - "ĠPAC": 46644, - "ĠнÑĸ": 46645, - "ĠRépublique": 46646, - "ĠÑĤÑĢав": 46647, - "ĠVanguard": 46648, - "uciones": 46649, - "Ġ무ëĮĢ": 46650, - "Ġgour": 46651, - "¯¤": 46652, - "ĠÏī": 46653, - "Ġsauna": 46654, - "Ġpeine": 46655, - "ĠValerie": 46656, - "ĠSikh": 46657, - "fendimiz": 46658, - "bero": 46659, - "ĠÑĩи": 46660, - "ĠdoÅĽwiad": 46661, - "ĠEuros": 46662, - "Ġcommentaires": 46663, - "Ġtweaks": 46664, - "ĠFaster": 46665, - "ĠÑĢаÑģк": 46666, - "Ġprogressively": 46667, - "ĠEuch": 46668, - "boro": 46669, - "ĠIngred": 46670, - "Cap": 46671, - "Ġuncheck": 46672, - "Ġìĺ¤ë¥¸": 46673, - "Ġwre": 46674, - "ĠFT": 46675, - "örung": 46676, - "Ġmemorized": 46677, - "ĠDinner": 46678, - "ĠPhew": 46679, - "oubl": 46680, - "Ġputa": 46681, - "Ġadmits": 46682, - "езде": 46683, - "opod": 46684, - "Ġpanda": 46685, - "Ġhinges": 46686, - "cipe": 46687, - "Ġtransact": 46688, - "Ġpodia": 46689, - "Ġpics": 46690, - "Ġcriterion": 46691, - "ĠOrchestra": 46692, - "ĠBlog": 46693, - "Ġsolemn": 46694, - "ĠPixar": 46695, - "Three": 46696, - "Ġвниз": 46697, - 
"ĠVolunte": 46698, - "ĠSavage": 46699, - "ĠPVC": 46700, - "ĠCaf": 46701, - "Ġwykon": 46702, - "Ġgraders": 46703, - "Ġcrouch": 46704, - "Ġcliche": 46705, - "Ġsoybeans": 46706, - "ĠMUR": 46707, - "ĠGonzalez": 46708, - "ĠMimi": 46709, - "ĠBolsonaro": 46710, - "Ġdiaphrag": 46711, - "Ġbilang": 46712, - "ëIJĺëĬĶ": 46713, - "éĤ£æĪijåĢij": 46714, - "Ġregulating": 46715, - "Mc": 46716, - "Judge": 46717, - "Ġнож": 46718, - "ĠjakÄħ": 46719, - "itesse": 46720, - "ĠWij": 46721, - "Ġlata": 46722, - "groaning": 46723, - "POSING": 46724, - "Ġ×IJ×ķת×ķ": 46725, - "Ġhaga": 46726, - "Ġgrounding": 46727, - "Ġviolently": 46728, - "Ġtills": 46729, - "Ġengag": 46730, - "ĠHollow": 46731, - "ĠпопÑĥлÑıÑĢ": 46732, - "Ġwprowad": 46733, - "Ġreplaces": 46734, - "Ġfluorescent": 46735, - "urgical": 46736, - "iggly": 46737, - "ĠTraditional": 46738, - "tte": 46739, - "ĠÙĦÙĩ": 46740, - "Ġphosphorus": 46741, - "Ġapron": 46742, - "ĠWaters": 46743, - "ĠKultur": 46744, - "авай": 46745, - "Ġolives": 46746, - "Ġ×Ķ×IJ׾": 46747, - "Ġteilweise": 46748, - "Ġsencill": 46749, - "Ġprends": 46750, - "Ġnarrower": 46751, - "Ġjätte": 46752, - "ĠInformationen": 46753, - "ìĥģìĿ´": 46754, - "Ġstarve": 46755, - "Ġfrick": 46756, - "ĠBeweg": 46757, - "ल": 46758, - "Ġdolphin": 46759, - "ĠLAUGHTER": 46760, - "ĠINTERVIE": 46761, - "åĶī": 46762, - "ĠyanlÄ±ÅŁ": 46763, - "Ġtorpedo": 46764, - "Ġshortages": 46765, - "ìĿ´ëĵľ": 46766, - "ıldı": 46767, - "Ġpaws": 46768, - "Ġozone": 46769, - "Ġcultivated": 46770, - "ĠFot": 46771, - "Ġnotor": 46772, - "ноз": 46773, - "ĠкоÑĪ": 46774, - "Ġtouchscreen": 46775, - "ĠAlly": 46776, - "æľĢè¿ij": 46777, - "Ġ맼ìŀĪìĸ´ìļĶ": 46778, - "ĠСеÑĢ": 46779, - "Ġвполне": 46780, - "Ġpaprika": 46781, - "ĠDustin": 46782, - "Ġefecto": 46783, - "Ġopini": 46784, - "Ġmuut": 46785, - "Ġhá»įc": 46786, - "Ġinterject": 46787, - "ÄĻt": 46788, - "Ġbutts": 46789, - "urez": 46790, - "ĠPike": 46791, - "ĠHok": 46792, - "ĠGuinea": 46793, - "ĠCathedral": 46794, - "Ġ1400": 46795, - "Cra": 46796, - "+,": 46797, - "맼": 46798, - "³´ëıĦë¡Ŀ": 46799, - "abyrin": 46800, - "Ġvideog": 46801, - "ĠоÑĢÑĥж": 46802, - "Ġuž": 46803, - "Ġbuscando": 46804, - "ĠAssistance": 46805, - "éĻ½": 46806, - "Ġmelhores": 46807, - "ì¡´": 46808, - "Ġëģ¼": 46809, - "ĠRJ": 46810, - "ĠتÙħ": 46811, - "Ġomin": 46812, - "Ġmotorcycles": 46813, - "ĠSapp": 46814, - "Ġsupplying": 46815, - "ĠAlgun": 46816, - "Ġaerospace": 46817, - "×¢×ľ": 46818, - "occup": 46819, - "leist": 46820, - "Ġê±°ëĬĶ": 46821, - "Ġcompleta": 46822, - "bres": 46823, - "!(": 46824, - "ĠÐŁÑĢед": 46825, - "Ġdisadvantaged": 46826, - "ĠAttend": 46827, - "ĠJudah": 46828, - "á»ĭch": 46829, - "ylene": 46830, - "actly": 46831, - "Ġsetups": 46832, - "Ġammonia": 46833, - "ĠSchweiz": 46834, - "ĠShame": 46835, - "Ġbande": 46836, - "ĠFuel": 46837, - "Ġtroublesome": 46838, - "Ġnumero": 46839, - "ĠMOM": 46840, - "ĠпÑĢедлаг": 46841, - "mentioned": 46842, - "ĠболÑĮÑĪое": 46843, - "ĠViktor": 46844, - "ĠStyles": 46845, - "Ġcrucified": 46846, - "ructured": 46847, - "environ": 46848, - "Ġmorals": 46849, - "Ġmeditating": 46850, - "Ġaxial": 46851, - "isance": 46852, - "ĠAbst": 46853, - "Green": 46854, - "Ġê±´ì": 46855, - "Ġquadrant": 46856, - "Ġpergi": 46857, - "Ġcameraman": 46858, - "ĠSequ": 46859, - "Ġpaused": 46860, - "ĠLaughing": 46861, - "ê·Ģ": 46862, - "?..": 46863, - "ĠÅ»e": 46864, - "Ġpermitir": 46865, - "Ġdetectors": 46866, - "ĠHUD": 46867, - "aval": 46868, - "ĠìĹ¬ê¸°ê¹Įì§Ģ": 46869, - "Ġhubs": 46870, - "Ġbestimmt": 46871, - "ĠбÑĥдеÑĤе": 46872, - "INTERPOSING": 46873, - "Ġtengan": 46874, - "Ġcrave": 46875, - "ĠBundesregierung": 46876, 
- "ĠBloody": 46877, - "Ġusability": 46878, - "ĠEas": 46879, - "ĠÄijá»Ļng": 46880, - "Ġ1955": 46881, - "Ġkriegen": 46882, - "Ġhabitual": 46883, - "Ġessentials": 46884, - "riminal": 46885, - "Ġroommates": 46886, - "éĤ£å°±": 46887, - "ĠпеÑĢеÑħод": 46888, - "Ġnghi": 46889, - "Ġmening": 46890, - "ĠSymphony": 46891, - "ĠHug": 46892, - "aggi": 46893, - "Ġwied": 46894, - "Ġmitad": 46895, - "ãģ£ãģ¦ãģĦãģĨ": 46896, - "teenth": 46897, - "idaÄĩ": 46898, - "Save": 46899, - "ĠrobiÄĩ": 46900, - "Ġbounces": 46901, - "°ĸìĹIJ": 46902, - "stars": 46903, - "Ġpragmatic": 46904, - "Ġcognition": 46905, - "Ġwrapper": 46906, - "Ġwarten": 46907, - "adh": 46908, - "Ġpensa": 46909, - "ĠHertz": 46910, - "ĠnÄĽ": 46911, - "ĠReid": 46912, - "ĠPCs": 46913, - "ĠMole": 46914, - "Ġ.....": 46915, - "Ġprecio": 46916, - "ĠChampionships": 46917, - "ê°ĢëĿ½": 46918, - "Ġvér": 46919, - "Ġcorridors": 46920, - "ĠElectronic": 46921, - "Sl": 46922, - "Ġале": 46923, - "Ġoverthrow": 46924, - "Ġkabul": 46925, - "ĠRES": 46926, - "ĠCyberpunk": 46927, - "огод": 46928, - "ĠÐĿав": 46929, - "Ġwan": 46930, - "Ġmanifestations": 46931, - "Ġcuales": 46932, - "ĠWise": 46933, - "ĠLösung": 46934, - "Ġexfol": 46935, - "Ġearns": 46936, - "ÑĥÑģÑĤиÑĤÑĮ": 46937, - "Ġsapp": 46938, - "ĠBraun": 46939, - "ĠBRANDON": 46940, - "ì¹Ļ": 46941, - "Ġsano": 46942, - "ĠFEL": 46943, - "ÑĭвайÑĤеÑģÑĮ": 46944, - "ождениÑı": 46945, - "Ġsewn": 46946, - "Fun": 46947, - "Ġreciprocal": 46948, - "Ġexpansive": 46949, - "ĠTraffic": 46950, - "Ġktórego": 46951, - "ĠÙĪس": 46952, - "æĺ¥": 46953, - "Ġ빨": 46954, - "prove": 46955, - "igare": 46956, - "Ġloh": 46957, - "اض": 46958, - "Hope": 46959, - "Ġdevotees": 46960, - "ĠGom": 46961, - "Ġsteals": 46962, - "ĠUms": 46963, - "ĠTwice": 46964, - "ãĤ²": 46965, - "iyim": 46966, - "Ġrhythmic": 46967, - "ĠVorte": 46968, - "Ġprefix": 46969, - "omination": 46970, - "Ġdato": 46971, - "Ġcustard": 46972, - "ĠVOICE": 46973, - "å·ŀ": 46974, - "Ġmeny": 46975, - "istors": 46976, - "Ġíĺij": 46977, - "ĠìĤ´ìķĦ": 46978, - "ĠíĥĦ": 46979, - "Ġkort": 46980, - "Ġaba": 46981, - "ĠVera": 46982, - "epy": 46983, - "Ġì¹´ë©ĶëĿ¼": 46984, - "Ġsubmerged": 46985, - "ĠClock": 46986, - "Ġthumbnails": 46987, - "Ġboast": 46988, - "ĠFare": 46989, - "!!]": 46990, - "ĠÅĽm": 46991, - "Ġkaikki": 46992, - "ĠTechnologies": 46993, - "ìĻ¸": 46994, - "ãĥĴ": 46995, - "иÑĤай": 46996, - "å°ıæĻĤ": 46997, - "ĠаÑĤ": 46998, - "Ġknobs": 46999, - "Ġreicht": 47000, - "ượng": 47001, - "glio": 47002, - "Ġ맼ìĿ´": 47003, - "ê°IJìĿĦ": 47004, - "Ġjotka": 47005, - "ĠHandy": 47006, - "ĠHaben": 47007, - "nous": 47008, - "Ġinland": 47009, - "Ġamazon": 47010, - "hooting": 47011, - "SL": 47012, - "Ġleisten": 47013, - "~\"": 47014, - "Ġprovoke": 47015, - "ĠTwist": 47016, - "Ġ×ij×Ĺ": 47017, - "Ġdeparted": 47018, - "ê°ľë¥¼": 47019, - "Ġkonse": 47020, - "ĠCarwyn": 47021, - "íķĺìĭł": 47022, - "idental": 47023, - "ESCO": 47024, - "Ġtteokbokki": 47025, - "Ġdizendo": 47026, - "ç·´": 47027, - "ındaki": 47028, - "imasu": 47029, - "afar": 47030, - "Ġlandfill": 47031, - "Ġcorrecting": 47032, - "Ġclears": 47033, - "ĠNummer": 47034, - "HAM": 47035, - "Ġcartridges": 47036, - "ĠDiesel": 47037, - "paced": 47038, - "Ġobliv": 47039, - "Ġmoyens": 47040, - "ĠSinne": 47041, - "ĠPreis": 47042, - "iliz": 47043, - "ĠÑģмож": 47044, - "Ġbroaden": 47045, - "ä»ĸæĺ¯": 47046, - "xes": 47047, - "Ġcarbohydrate": 47048, - "íĺ¹": 47049, - "seok": 47050, - "Ġechoes": 47051, - "Ġcess": 47052, - "ë°Ķ": 47053, - "ĠбизнеÑģ": 47054, - "Ġllamado": 47055, - "Ġessent": 47056, - "ĠìĿ¼ë°ĺ": 47057, - "ĠAires": 47058, - "phen": 47059, - "Ġzebra": 47060, - 
"Ġsymbolism": 47061, - "Once": 47062, - "Ġracks": 47063, - "ĠKafka": 47064, - "ĠÑģеÑĢÑĮез": 47065, - "Ġsinn": 47066, - "picious": 47067, - "kaa": 47068, - "Ġmotherfucker": 47069, - "Ġapprenticeship": 47070, - "Ġrpm": 47071, - "Ġtaxation": 47072, - "Ġfurry": 47073, - "ĠSacred": 47074, - "ĠÑĢазм": 47075, - "pora": 47076, - "enges": 47077, - "ĠíĹĪë": 47078, - "ĠÑģин": 47079, - "Ġsanitizer": 47080, - "Ġcringe": 47081, - "ĠSca": 47082, - "оÑĩно": 47083, - "Ġofere": 47084, - "Ġmelodies": 47085, - "ĠVelvet": 47086, - "ĠIhrer": 47087, - "ĠHybrid": 47088, - "ĠGiov": 47089, - "Ġirgendwas": 47090, - "Ġdepende": 47091, - "ĠUsers": 47092, - "Ġhump": 47093, - "driving": 47094, - "Ġsf": 47095, - "Ġruthless": 47096, - "à¹Ģà¸Ħ": 47097, - "Ġlemons": 47098, - "Ġföret": 47099, - "ĠOj": 47100, - "Ġмама": 47101, - "Ġinterpersonal": 47102, - "Ġgev": 47103, - "Ġabnorm": 47104, - "иÑģл": 47105, - "Ġинд": 47106, - "Ġkontroll": 47107, - "Ġregres": 47108, - "Ġledge": 47109, - "Ġerzählt": 47110, - "ĠTact": 47111, - "Ġarrivé": 47112, - "Ġsubstantive": 47113, - "Ġspoonful": 47114, - "zwischen": 47115, - "ooooo": 47116, - "Ġcontenido": 47117, - "Ġbesl": 47118, - "á»ĥm": 47119, - "kten": 47120, - "Jamie": 47121, - "Ġsandy": 47122, - "ä¸įåIJĮ": 47123, - "âĭ": 47124, - "Ġpase": 47125, - "Ġdette": 47126, - "ĠBelgian": 47127, - "ê°ľë": 47128, - "ulares": 47129, - "rud": 47130, - "igor": 47131, - "ĠíĮ¬ë": 47132, - "Ġremedies": 47133, - "Ġblasting": 47134, - "ĠSich": 47135, - "Ġожид": 47136, - "Ġmonstr": 47137, - "Ġmanifold": 47138, - "Ġglauben": 47139, - "ĠEST": 47140, - "Ġstreamline": 47141, - "Ġlobbying": 47142, - "ĠGothic": 47143, - "toire": 47144, - "..'": 47145, - "Ġdémocr": 47146, - "ĠнаблÑİд": 47147, - "Ġwspól": 47148, - "ĠczÄĻÅĽÄĩ": 47149, - "ä¸ĭéĿ¢": 47150, - "isés": 47151, - "gangen": 47152, - "Ġbezpie": 47153, - "remlin": 47154, - "ê°Ŀ": 47155, - "Still": 47156, - "Ġresides": 47157, - "Ġgelecek": 47158, - "Ġtéléphone": 47159, - "Ġpewn": 47160, - "Ġleopard": 47161, - "Ġcomplimentary": 47162, - "Ġcrib": 47163, - "ĠAnimals": 47164, - "Ġgeil": 47165, - "essel": 47166, - "Ġgarder": 47167, - "Ġcatchy": 47168, - "樹": 47169, - "ĠEts": 47170, - "ĠCommercial": 47171, - "ĠDENNIS": 47172, - "ĠCoordinator": 47173, - "ĠAbigail": 47174, - "ffffff": 47175, - "ấp": 47176, - "Ġpequeña": 47177, - "Ġinjections": 47178, - "cekt": 47179, - "Ġphilanthropy": 47180, - "Ġpuck": 47181, - "Ġcelebrates": 47182, - "ĠDunk": 47183, - "ĠDlatego": 47184, - "ãģ¾ãģł": 47185, - "δή": 47186, - "graduate": 47187, - "ĠMobil": 47188, - "till": 47189, - "acam": 47190, - "Ġyolks": 47191, - "Ġtangled": 47192, - "Ġmaniac": 47193, - "Ġobliged": 47194, - "ĠLaink": 47195, - "Ġverder": 47196, - "ĠDamon": 47197, - "Ġmutant": 47198, - "Ġhopping": 47199, - "Ġreins": 47200, - "Ġinverter": 47201, - "Ġcontempt": 47202, - "×ł×¡": 47203, - "learning": 47204, - "Miss": 47205, - "ĠÐĵоÑģ": 47206, - "ĠMeyer": 47207, - "ê»ĺìĦľ": 47208, - "é£İ": 47209, - "×ķ׳×Ļ×Ŀ": 47210, - "asking": 47211, - "Ġtrimming": 47212, - "Ġtreasury": 47213, - "Ġsente": 47214, - "Aust": 47215, - "ĠUnterstützung": 47216, - "ĠComedy": 47217, - "ĠAnakin": 47218, - "é¹": 47219, - "ÑĢÑĥÑĤ": 47220, - "ĠHari": 47221, - "ographers": 47222, - "Ġoatmeal": 47223, - "ĠBots": 47224, - "ä¸įäºĨ": 47225, - "ĠпалÑĮ": 47226, - "Ġacknowledgement": 47227, - "xic": 47228, - "Ġê´Ģìĭ¬": 47229, - "gasping": 47230, - "Ġãģķ": 47231, - "Ġterrace": 47232, - "Ġornaments": 47233, - "ĠMER": 47234, - "committee": 47235, - "ĠìĹĨìĬµëĭĪëĭ¤": 47236, - "Ġrij": 47237, - "é³": 47238, - "צ×Ŀ": 47239, - "leme": 47240, - "Ġliberties": 47241, - 
"Ġfellas": 47242, - "ĠCopper": 47243, - "bench": 47244, - "ĠIdea": 47245, - "á»įn": 47246, - "ÑĪа": 47247, - "Ġversión": 47248, - "ÏĦοÏį": 47249, - "ĠÐľÐ¸": 47250, - "ĠпÑĢилож": 47251, - "Ġboxer": 47252, - "ĠTanner": 47253, - "ĠMoy": 47254, - "ì¹ĺëĬĶ": 47255, - "Thr": 47256, - "Ġtinham": 47257, - "Ġpolishing": 47258, - "Ġconsequently": 47259, - "Ġamenities": 47260, - "ĠKI": 47261, - "ĠGREEN": 47262, - "ĠFrankie": 47263, - "ниÑĤ": 47264, - "ittel": 47265, - "Ñģкое": 47266, - "ursed": 47267, - "Ġupbringing": 47268, - "Ġthứ": 47269, - "ĠìĭĿìľ¼ë¡ľ": 47270, - "Ġwhim": 47271, - "Ġchinese": 47272, - "confidence": 47273, - "ĠJeder": 47274, - "ãģªãģ®ãģ§": 47275, - "ajcie": 47276, - "ĠTous": 47277, - "ĠPowers": 47278, - "ừa": 47279, - "othermal": 47280, - "ĠвÑĭÑĪе": 47281, - "rale": 47282, - "اخ": 47283, - "Ġì§ĢìĽIJ": 47284, - "Ġépisode": 47285, - "Ġsulph": 47286, - "Ġencara": 47287, - "kraft": 47288, - "aları": 47289, - "ĠComes": 47290, - "Ġdivul": 47291, - "ĠRudolph": 47292, - "ĠMuse": 47293, - "Ġutens": 47294, - "ĠìŀIJ주": 47295, - "Ġpana": 47296, - "ĠVegeta": 47297, - "ĠPHP": 47298, - "ĠNSA": 47299, - "entin": 47300, - "ĠCarnegie": 47301, - "اÙĬ": 47302, - "iÄĻcy": 47303, - "Harry": 47304, - "Ġfır": 47305, - "Сп": 47306, - "Ġgladly": 47307, - "Ġaveraging": 47308, - "íķĺê²łìĬµëĭĪëĭ¤": 47309, - "лÑıÑİÑĤÑģÑı": 47310, - "ĠÐľÐµÐ½Ñı": 47311, - "Ġquotation": 47312, - "rires": 47313, - "itchens": 47314, - "ayed": 47315, - "Ġunatt": 47316, - "ĠPerez": 47317, - "ĠоÑĤмеÑĤ": 47318, - "Ġtactile": 47319, - "ĠEuh": 47320, - "isini": 47321, - "buh": 47322, - "Ġhatır": 47323, - "ĠìŀĪìľ¼": 47324, - "Ġpolicymakers": 47325, - "³´ìĦ¸ìļĶ": 47326, - "acı": 47327, - "Ġκι": 47328, - "Ġregistering": 47329, - "reto": 47330, - "ĠSprinkle": 47331, - "ĠGrammy": 47332, - "axter": 47333, - "Ġби": 47334, - "Ġsitter": 47335, - "Ġpredic": 47336, - "Ġthinly": 47337, - "Ġstrum": 47338, - "Ġaggrav": 47339, - "Ġaha": 47340, - "رج": 47341, - "mellow": 47342, - "Ġconstante": 47343, - "ĠLaut": 47344, - "iston": 47345, - "Ġtransitioned": 47346, - "ĠCambodia": 47347, - "ãģĦãģįãģ¾ãģĻ": 47348, - "è·Łå¤§å®¶": 47349, - "arted": 47350, - "Ġmisf": 47351, - "ĠPunkte": 47352, - "Įëĵł": 47353, - "Ġtrembling": 47354, - "Ġgespannt": 47355, - "ĠعÙĦÙĬÙĩ": 47356, - "ĠникакиÑħ": 47357, - "Ġë¶Ģëĵľë": 47358, - "ĠÑĢазвиÑĤ": 47359, - "Ġitchy": 47360, - "Ġciento": 47361, - "Ġplains": 47362, - "Ġkittens": 47363, - "Ġbacklog": 47364, - "ĠPresiding": 47365, - "pta": 47366, - "Ġhavoc": 47367, - "ĠDarrin": 47368, - "ĠÐĽÑİб": 47369, - "Ġsegregated": 47370, - "Ġghetto": 47371, - "Ġerlebt": 47372, - "Ġdrugiej": 47373, - "ĠSixt": 47374, - "åıĥ": 47375, - "ระ": 47376, - "uencia": 47377, - "Ġíķĺ기": 47378, - "ĠëĨį": 47379, - "Ġrobi": 47380, - "Ġpioneers": 47381, - "Ġmilliards": 47382, - "ĠWitcher": 47383, - "Ġ무ìĹĩ": 47384, - "orro": 47385, - "mass": 47386, - "Ġdivergence": 47387, - "ĠRivera": 47388, - "ĠNoodles": 47389, - "Ġendroit": 47390, - "ĠKosten": 47391, - "ĠдÑĢÑĥга": 47392, - "ĠmÃŃnimo": 47393, - "ĠKazakhstan": 47394, - "تÙĩ": 47395, - "ĠвоздÑĥ": 47396, - "Ġgeschrieben": 47397, - "ĠNil": 47398, - "Ñģки": 47399, - "ĠFrüh": 47400, - "Ġbeverages": 47401, - "æºIJ": 47402, - "ĠGon": 47403, - "æĺ¨": 47404, - "Arin": 47405, - "ĠIntro": 47406, - "ocalyptic": 47407, - "Ġexhaustion": 47408, - "ĠStatus": 47409, - "ĠBattery": 47410, - "ész": 47411, - "£¼ë": 47412, - "airy": 47413, - "Ġë³´ìŬëĵľë": 47414, - "Ġdisparity": 47415, - "ÙĮ": 47416, - "ĠTucson": 47417, - "Ġbrightly": 47418, - "problem": 47419, - "Ġbiomass": 47420, - "éĻį": 47421, - "§ī": 47422, - "Ġhurdle": 47423, - 
"Ġwavelengths": 47424, - "Ġ<<": 47425, - "Ġteamed": 47426, - "FFFF": 47427, - "ĠSlim": 47428, - "omial": 47429, - "Ġunveiled": 47430, - "ĠVerein": 47431, - "ÙĤØ·": 47432, - "estry": 47433, - "Ġclás": 47434, - "Ġcheddar": 47435, - "Ġaccusing": 47436, - "ĠScientific": 47437, - "ĠбÑĥде": 47438, - "ĠCyrus": 47439, - "εÏĦε": 47440, - "Ĩĵê³ł": 47441, - "Ġë³Ħ": 47442, - "Ġcurd": 47443, - "Ġreferrals": 47444, - "shift": 47445, - "åįķ": 47446, - "ników": 47447, - "Ġmier": 47448, - "Ġconfronting": 47449, - "ê²ĥëıĦ": 47450, - "awl": 47451, - "Ġtryin": 47452, - "Ġê·¸ëŀĺìļĶ": 47453, - "Ġchiar": 47454, - "Ġìĺ¤ëĬĺëıĦ": 47455, - "æĶ¿æ²»": 47456, - "esque": 47457, - "Ġmismos": 47458, - "ĠShak": 47459, - "Ġsociaux": 47460, - "ĠpiÅŁ": 47461, - "ĠkiÅŁi": 47462, - "Ġcyan": 47463, - "hay": 47464, - "bew": 47465, - "bod": 47466, - "Ġι": 47467, - "ĠMainly": 47468, - "ÑİÑĤÑĮ": 47469, - "habitude": 47470, - "ĠÑģпокой": 47471, - "è·ŁæĪij": 47472, - "Ġprecon": 47473, - "ĠMandy": 47474, - "ðŁ¤£": 47475, - "illos": 47476, - "Ġgrupp": 47477, - "Ġcrumble": 47478, - "Ġconstructor": 47479, - "ervices": 47480, - "Ġlighthouse": 47481, - "ĠConcept": 47482, - "анÑĤи": 47483, - "altro": 47484, - "hope": 47485, - "ĠAlleg": 47486, - "ìĸ´ë¥¼": 47487, - "pieces": 47488, - "ounter": 47489, - "ĠíķĺëĭĪê¹Į": 47490, - "ĠìĿ¸íĦ°ë": 47491, - "Ġvéritable": 47492, - "Ġthreaded": 47493, - "blind": 47494, - "ĤĺëĿ¼": 47495, - "Ġtrays": 47496, - "ĠEdison": 47497, - "ĠÃĸz": 47498, - "ĠStevie": 47499, - "Ġlender": 47500, - "Ġbrigade": 47501, - "Ġdeutsche": 47502, - "muffled": 47503, - "bart": 47504, - "Ġinsanity": 47505, - "Ġsavvy": 47506, - "Ġsensational": 47507, - "Ġderechos": 47508, - "ĠMX": 47509, - "ĠпÑĢеп": 47510, - "Ġthreatens": 47511, - "ĠrealtÃł": 47512, - "Ġindicative": 47513, - "Ġchops": 47514, - "Ġbenefiting": 47515, - "ĠVernon": 47516, - "ĠStrand": 47517, - "nun": 47518, - "quently": 47519, - "101": 47520, - "Ġeel": 47521, - "ìĪĻ": 47522, - "rints": 47523, - "ĠÙħس": 47524, - "Ġبد": 47525, - "ĠпоÑģÑĤÑĢо": 47526, - "ĠyapmÄ±ÅŁ": 47527, - "Ġolması": 47528, - "Ġiedereen": 47529, - "olé": 47530, - "kef": 47531, - "Ġë°ľìĥĿ": 47532, - "Ġrained": 47533, - "Ġalmighty": 47534, - "ĠвÑĭд": 47535, - "ĠCPR": 47536, - "Fre": 47537, - "Ġinhabited": 47538, - "Ġarbets": 47539, - "Ġakin": 47540, - "аÑģÑĤв": 47541, - "vania": 47542, - "Ġhäufig": 47543, - "ĠMatte": 47544, - "sorry": 47545, - "Jenny": 47546, - "ĠгÑĢад": 47547, - "Ġwhit": 47548, - "Ġbrokers": 47549, - "å¯Ł": 47550, - "Ġhine": 47551, - "asten": 47552, - "ĠгÑĢÑĥ": 47553, - "MB": 47554, - "ĠPRI": 47555, - "Sab": 47556, - "Ġwrestler": 47557, - "Ġfacilitating": 47558, - "Ġehkä": 47559, - "ĠCred": 47560, - "Ġ127": 47561, - "Ġnothin": 47562, - "Ġmandated": 47563, - "å¯Į": 47564, - "ÑĥÑĤÑģÑĤв": 47565, - "Frank": 47566, - "Ġwors": 47567, - "ĠdzieÅĦ": 47568, - "ĠUnderground": 47569, - "Ġznajdu": 47570, - "ĠBä": 47571, - "ĠPrinzip": 47572, - "аÑĤелей": 47573, - "Ġveterinar": 47574, - "Ġsplendid": 47575, - "Ġrozp": 47576, - "Ġpsychopath": 47577, - "igon": 47578, - "Ġhops": 47579, - "Ġcần": 47580, - "ĠXian": 47581, - "Ġtroisième": 47582, - "Ġproducto": 47583, - "ĠdeÄŁer": 47584, - "ĠContinuing": 47585, - "ивал": 47586, - "cık": 47587, - "Ġmoisturizer": 47588, - "White": 47589, - "Ġsiis": 47590, - "ĠEverest": 47591, - "ienced": 47592, - "Ġcảm": 47593, - "ĠJapon": 47594, - "´ìłĦ": 47595, - "ĠtenÃŃan": 47596, - "Ġencanta": 47597, - "Mm": 47598, - "Ġdropdown": 47599, - "ĠIya": 47600, - "³´ë©´": 47601, - "Ġwording": 47602, - "ĠSqueeze": 47603, - "ĠMaple": 47604, - "Ġclarified": 47605, - "ĠMunicip": 47606, - 
"ĠRouge": 47607, - "ĠNicki": 47608, - "ĠGoo": 47609, - "volt": 47610, - "tek": 47611, - "fecture": 47612, - "fred": 47613, - "arrive": 47614, - "ãĥ¼ãģĦ": 47615, - "tez": 47616, - "Ep": 47617, - "Ġobras": 47618, - "ĠVID": 47619, - "ĠRiv": 47620, - "ĠModi": 47621, - "ibe": 47622, - "Ġacontecendo": 47623, - "Ġimitation": 47624, - "Ġcamouflage": 47625, - "Ġspanning": 47626, - "ĠSECRET": 47627, - "ĠOreo": 47628, - "ìĨĮ리": 47629, - "Ġhunch": 47630, - "ĠcaÅĤe": 47631, - "Ġspontaneously": 47632, - "ĠPerd": 47633, - "Ġetap": 47634, - "ĠHole": 47635, - "ĠDisability": 47636, - "Ġafterlife": 47637, - "æģ©": 47638, - "Ġtestified": 47639, - "Ġpresup": 47640, - "Ġpetroleum": 47641, - "Ġcontrario": 47642, - "ĠAssessment": 47643, - "ÄŁlu": 47644, - "Ġpests": 47645, - "Ġdilig": 47646, - "ĠвÑģÑĤÑĢеÑĤ": 47647, - "Ġconséqu": 47648, - "Ġcannons": 47649, - "Ġcanoe": 47650, - "ĠMile": 47651, - "Ġcitoy": 47652, - "Ġbegged": 47653, - "ĠMinnie": 47654, - "ÅĤych": 47655, - "Ġprincipe": 47656, - "ÏĢÏĮν": 47657, - "mniej": 47658, - "Ġwert": 47659, - "Ġëĭ¤ëĵ¤": 47660, - "anse": 47661, - "Ġuncles": 47662, - "Ġprovocative": 47663, - "Ġintersections": 47664, - "Ġdemocrats": 47665, - "ĠJulius": 47666, - "инки": 47667, - "ygusal": 47668, - "Ġ׾×ķ": 47669, - "Ġgjorde": 47670, - "Ġgasket": 47671, - "ĠBock": 47672, - "ĠÄ°n": 47673, - "breat": 47674, - "ĠEquity": 47675, - "ardı": 47676, - "Ġканале": 47677, - "Ġдней": 47678, - "ĠtỼi": 47679, - "Ġfixture": 47680, - "Ġabuses": 47681, - "Ġvaya": 47682, - "Ġouvert": 47683, - "Ġmulticultural": 47684, - "Ġcontexto": 47685, - "ĠSesame": 47686, - "Ġdépl": 47687, - "Ġconsomm": 47688, - "ĠParte": 47689, - "Ġpem": 47690, - "ĠConan": 47691, - "ĠбÑĸлÑĮ": 47692, - "Ġpersuaded": 47693, - "Ġdrains": 47694, - "Moo": 47695, - "FORE": 47696, - "ĠбаÑĤ": 47697, - "Ġfod": 47698, - "ĠProducts": 47699, - "ì§Ħì§ľ": 47700, - "Ġ\"[": 47701, - "ĠWick": 47702, - "ĠNaruto": 47703, - "нали": 47704, - "ryw": 47705, - "Ġlodge": 47706, - "Ġinh": 47707, - "Ġvontade": 47708, - "Ġdij": 47709, - "ĠJesús": 47710, - "Looking": 47711, - "Ġforearm": 47712, - "ĠIntegration": 47713, - "ĠHARRIS": 47714, - "Ġtoolbar": 47715, - "leader": 47716, - "Ġseldom": 47717, - "ĠбÑĢоÑģ": 47718, - "ĠKook": 47719, - "онд": 47720, - "Ġmonopol": 47721, - "Ġmillet": 47722, - "Ġlira": 47723, - "ĠAsians": 47724, - "Ġ1890": 47725, - "ciÄŁim": 47726, - "Ġeden": 47727, - "ĠIKEA": 47728, - "ĠNeighbor": 47729, - "ĠKazuya": 47730, - "üd": 47731, - "Ġpsychedel": 47732, - "Ġenvisioned": 47733, - "åĿĹ": 47734, - "Ġï·»": 47735, - "Ġwunder": 47736, - "ĠBulgaria": 47737, - "Brid": 47738, - "Ġmarrow": 47739, - "Ġdepiction": 47740, - "ĠTin": 47741, - "ĠPharise": 47742, - "Ġeinzige": 47743, - "Ġblindly": 47744, - "ãģĽãģ¦": 47745, - "Ġdefens": 47746, - "Dire": 47747, - "Ġvibrating": 47748, - "Ġtrolls": 47749, - "Ġdisrespectful": 47750, - "Ġwod": 47751, - "Ġstimuli": 47752, - "Ġcreeping": 47753, - "Ġclairement": 47754, - "Ġscariest": 47755, - "Ġdécouvrir": 47756, - "Ġ104": 47757, - "ĠвеÑĢÑħ": 47758, - "ĠÅĤat": 47759, - "Ġróżne": 47760, - "Ġbarley": 47761, - "ĠRepl": 47762, - "ĠTwe": 47763, - "kke": 47764, - "ĠãģĿãĤĮ": 47765, - "ĠRedmi": 47766, - "ĠMetroid": 47767, - "ĠήÏĦαν": 47768, - "Check": 47769, - "ĠSEN": 47770, - "Ġido": 47771, - "ÑĤоÑĢии": 47772, - "óp": 47773, - "UNKNOWN": 47774, - "Ġändern": 47775, - "ĠJuice": 47776, - "ĠGesicht": 47777, - "å°±æľĥ": 47778, - "ĠнаÑģÑĤолÑĮко": 47779, - "íĥķ": 47780, - "ÂŃ": 47781, - "exhales": 47782, - "Ġì´ī": 47783, - "Ġjsem": 47784, - "ÏĢÏīÏĤ": 47785, - "Ġitt": 47786, - "ëªħìĿ´": 47787, - "Ġremix": 47788, - 
"Ġblossoms": 47789, - "ĠRenee": 47790, - "isations": 47791, - "ìĬ¤íĦ°": 47792, - "Ġë³´ìĿ´ëĬĶ": 47793, - "uestas": 47794, - "opedia": 47795, - "ĠAim": 47796, - "ìĿ´ì¦Ī": 47797, - "scene": 47798, - "Ġleakage": 47799, - "uckt": 47800, - "Sad": 47801, - "Ask": 47802, - "Ġsuspense": 47803, - "Ġimpost": 47804, - "ĠStrategic": 47805, - "ĠItÃŃs": 47806, - "âĢĮ": 47807, - "Ġkeyboards": 47808, - "Ġamusing": 47809, - "ogr": 47810, - "iderman": 47811, - "ŀĸ": 47812, - "ĠвижÑĥ": 47813, - "Ġdips": 47814, - "Ġapologized": 47815, - "ĠSTAR": 47816, - "Ġescuela": 47817, - "ĠChing": 47818, - "нениÑı": 47819, - "Ġë¶Ģë¶ĦìĿ´": 47820, - "ĠFleet": 47821, - "Ġsamb": 47822, - "Ġentsprechend": 47823, - "Ġelectrodes": 47824, - "ĠFreiheit": 47825, - "æĪijä¸įçŁ¥éģĵ": 47826, - "ĠShrim": 47827, - "iÃŁe": 47828, - "Ġselections": 47829, - "Ġfordi": 47830, - "Ġdoss": 47831, - "ÑıÑĩ": 47832, - "Ġdiscriminate": 47833, - "ĠAuÃŁerdem": 47834, - "Ġdesenvolv": 47835, - "ĠInternal": 47836, - "ĠBenedict": 47837, - "å¯Ĩ": 47838, - "ĠShiv": 47839, - "Missy": 47840, - "ĠобнаÑĢÑĥж": 47841, - "ĠнаÑģÑĤÑĢо": 47842, - "Ġcontrolar": 47843, - "ĠLia": 47844, - "Ġopioids": 47845, - "antu": 47846, - "Ġcupboard": 47847, - "æģIJ": 47848, - "ге": 47849, - "achts": 47850, - "Ġcurated": 47851, - "Ġxem": 47852, - "Ġweary": 47853, - "Ġbrethren": 47854, - "Ġbudgeting": 47855, - "Ġpourtant": 47856, - "éļ»": 47857, - "aisia": 47858, - "ĠоÑĤвеÑĩ": 47859, - "ĠGIS": 47860, - "μαι": 47861, - "Ġש×Ķ×ķ×IJ": 47862, - "Ġsaud": 47863, - "ĠlỼ": 47864, - "ÐķТ": 47865, - "ubine": 47866, - "ĠнÑĥжен": 47867, - "Ġkidnapping": 47868, - "Ġbrat": 47869, - "ĠTerre": 47870, - "ĠMonet": 47871, - "Ġë§ĪìĬ¤íģ": 47872, - "Ġflashy": 47873, - "ĠISBN": 47874, - "Ġfreelance": 47875, - "iage": 47876, - "Ġjunge": 47877, - "충": 47878, - "ceral": 47879, - "ĠÑĤоÑĩки": 47880, - "Ġformulate": 47881, - "ĠFER": 47882, - "ĠDartmouth": 47883, - "ìľ¼ë©´ìĦľ": 47884, - "å¢ĥ": 47885, - "owiÄħ": 47886, - "ĠëĶĶìŀIJ": 47887, - "Ġregiment": 47888, - "Ġmetabolismo": 47889, - "ĠParr": 47890, - "Ġ충ë¶Ħ": 47891, - "Ġsanity": 47892, - "ĠLal": 47893, - "ĠGö": 47894, - "ĠGla": 47895, - "Ġproto": 47896, - "Ġmicroscopic": 47897, - "Ġkang": 47898, - "ĠScalia": 47899, - "Ġpug": 47900, - "ĠScore": 47901, - "ĠSavannah": 47902, - "Ġgarde": 47903, - "ĠNOR": 47904, - "å°įåIJ§": 47905, - "Ġscheint": 47906, - "ĠpóÅĤ": 47907, - "Ġcorri": 47908, - "Ġbrute": 47909, - "ĠÅĤad": 47910, - "ä»ĸ们": 47911, - "Ġsucceeding": 47912, - "Ġbicycles": 47913, - "Non": 47914, - "Ġseekers": 47915, - "Ġunconditional": 47916, - "Ġrhymes": 47917, - "ĠGarage": 47918, - "Ġinvoice": 47919, - "Ġcanvi": 47920, - "neck": 47921, - "Ġcustomizable": 47922, - "iritual": 47923, - "Queen": 47924, - "íķĺìĭľëĬĶ": 47925, - "Ġpowerless": 47926, - "Ġcsak": 47927, - "ä¸įä¼ļ": 47928, - "isoft": 47929, - "ĠìłķíĻķ": 47930, - "Ġnhân": 47931, - "ĠMAND": 47932, - "ĠHaf": 47933, - "Ġrevolves": 47934, - "ä¹Łåı¯ä»¥": 47935, - "ovan": 47936, - "aroo": 47937, - "ĠGrind": 47938, - "éĽª": 47939, - "Ġindispensable": 47940, - "Ġconsulted": 47941, - "ĠClinical": 47942, - "Acc": 47943, - "Ġolhos": 47944, - "Ġmonter": 47945, - "ĠHana": 47946, - "etah": 47947, - "Ġvaan": 47948, - "Ġtigers": 47949, - "Ġcaucus": 47950, - "ðŁĺĤ": 47951, - "³´ìŀIJ": 47952, - "powers": 47953, - "iums": 47954, - "ĠíĨłë": 47955, - "Ġtradicional": 47956, - "Ġresonated": 47957, - "Ġìĭłê¸°": 47958, - "them": 47959, - "Robert": 47960, - "Ġelemento": 47961, - "Ġantid": 47962, - "ĠобÑģ": 47963, - "Ġnatives": 47964, - "Ġloca": 47965, - "owment": 47966, - "ĠTight": 47967, - "ĠæĢĿ": 47968, - "Ġmelan": 47969, - 
"ĠNue": 47970, - "amis": 47971, - "Ġsorgen": 47972, - "asına": 47973, - "Home": 47974, - "ĠPUBG": 47975, - "Ġawfully": 47976, - "ĠShore": 47977, - "ĠPerché": 47978, - "ĠLau": 47979, - "ĠCinderella": 47980, - "ĠChest": 47981, - "Ġsemantic": 47982, - "Ġdeserted": 47983, - "ĠMomo": 47984, - "ĠHernandez": 47985, - "genes": 47986, - "ĠAdult": 47987, - "иÑĩеÑģкого": 47988, - "oshima": 47989, - "ĠcaracterÃŃsticas": 47990, - "ĠKL": 47991, - "´ìŀ¥": 47992, - "ocar": 47993, - "Ġfehlt": 47994, - "Ġdruk": 47995, - "ĠPoppy": 47996, - "ENGLISH": 47997, - "ĠVergleich": 47998, - "Brien": 47999, - "Ġrecomp": 48000, - "ĠÑģд": 48001, - "Ġmerger": 48002, - "Ġmarketers": 48003, - "Ġhoneymoon": 48004, - "Ġpenso": 48005, - "Ġbelli": 48006, - "еÑĤÑĥ": 48007, - "Ġbanker": 48008, - "Camera": 48009, - "ĠStall": 48010, - "ĠStamp": 48011, - "ĠBite": 48012, - "ежде": 48013, - "Ġsür": 48014, - "Ġgüç": 48015, - "ĠPassover": 48016, - "ĠBugün": 48017, - "ĠÑģожалениÑİ": 48018, - "Ġниз": 48019, - "Ġmanure": 48020, - "Ġglacier": 48021, - "è«ĩ": 48022, - "RAY": 48023, - "terror": 48024, - "Ġsalads": 48025, - "Ġhurricanes": 48026, - "ĠDesigner": 48027, - "atorio": 48028, - "Ġfactual": 48029, - "ĠTammy": 48030, - "ĠзвÑĥÑĩ": 48031, - "Ġintroductions": 48032, - "Ġhousekeeping": 48033, - "Ġhanger": 48034, - "ëĭĺë": 48035, - "akte": 48036, - "ĠCola": 48037, - "']": 48038, - "ĠGender": 48039, - "оÑĢон": 48040, - "ipse": 48041, - "icias": 48042, - "Ġsuccessive": 48043, - "Ġpolitic": 48044, - "Ġhöher": 48045, - "ĠQiao": 48046, - "ĠGimme": 48047, - "Ġлож": 48048, - "Ġseb": 48049, - "ĠWeiter": 48050, - "ĠSakura": 48051, - "ĠBoulder": 48052, - "ĠAmérica": 48053, - "peÅĤnie": 48054, - "ĠtecnologÃŃa": 48055, - "ishops": 48056, - "fur": 48057, - "Ġmoonlight": 48058, - "Ġdispersed": 48059, - "Ġrez": 48060, - "енное": 48061, - "алÑĮнÑĥÑİ": 48062, - "ĠTwelve": 48063, - "ĠHOR": 48064, - "ìĭ¤íŀĪ": 48065, - "ilage": 48066, - "Ġshaded": 48067, - "Ġresumes": 48068, - "ĠPeanut": 48069, - "ĠMILL": 48070, - "apons": 48071, - "ĠUFC": 48072, - "ĠSole": 48073, - "Ġjoystick": 48074, - "ĠOlivier": 48075, - "warming": 48076, - "Ġsyllabus": 48077, - "ĠобÑīе": 48078, - "Ġhiá»ĩn": 48079, - "Ġfesta": 48080, - "Ġcradle": 48081, - "ĠZac": 48082, - "Ġremembrance": 48083, - "Ġê°ĻìķĦìĦľ": 48084, - "ĠpiÄĻk": 48085, - "Ġcoexist": 48086, - "ĠVII": 48087, - "Ġáreas": 48088, - "Ġuważ": 48089, - "Ġobservers": 48090, - "Ġmänniskor": 48091, - "coon": 48092, - "ĠDAM": 48093, - "Ġnaszym": 48094, - "Ġalligator": 48095, - "ĠFreeze": 48096, - "ĠEstate": 48097, - "ĠÑĤÑĢади": 48098, - "Ġundercover": 48099, - "Ġnies": 48100, - "ĠFehler": 48101, - "plin": 48102, - "ĠKabul": 48103, - "ilate": 48104, - "Ġê³łìĸij": 48105, - "Ġmop": 48106, - "ìĦ¼": 48107, - "Ġanderer": 48108, - "ĠKELL": 48109, - "оки": 48110, - "ĠжеÑģÑĤ": 48111, - "Ġgrazing": 48112, - "ĠdaÃŃ": 48113, - "Ġcapitalize": 48114, - "Ġapex": 48115, - "Ġnurturing": 48116, - "Ġcortar": 48117, - "Ġcontrac": 48118, - "ımızı": 48119, - "Ġtandem": 48120, - "éĥ½æľī": 48121, - "gement": 48122, - "ĠÑģиÑģÑĤема": 48123, - "Ġmanque": 48124, - "iajÄħ": 48125, - "WOR": 48126, - "Ġاب": 48127, - "Ġcarts": 48128, - "ANO": 48129, - "Ġë°Ľê³ł": 48130, - "ĠCena": 48131, - "ĠBiology": 48132, - "idar": 48133, - "Ġaż": 48134, - "erne": 48135, - "anu": 48136, - "Ġthanked": 48137, - "Ġsubmarines": 48138, - "Ġmanic": 48139, - "Ġмоз": 48140, - "ä¼Ĭ": 48141, - "instant": 48142, - "essential": 48143, - "Ġsamurai": 48144, - "Ġpasti": 48145, - "Ġalan": 48146, - "Ġbroch": 48147, - "Ġbaker": 48148, - "ĠGuill": 48149, - "¨¼": 48150, - "Ġwithdrawn": 48151, - 
"ëĭĿ": 48152, - "Perfect": 48153, - "quency": 48154, - "Ġstreamlined": 48155, - "Ġ1300": 48156, - "´ëıĦ": 48157, - "Ġëĸłë": 48158, - "Ġãģ¯ãģĦ": 48159, - "Ġhvad": 48160, - "ä¸Ģå®ļè¦ģ": 48161, - "Ġverbally": 48162, - "ĠKons": 48163, - "Ġì¡°ìĭ¬": 48164, - "Ġdiez": 48165, - "æİ°æİ°": 48166, - "Ġchuckling": 48167, - "ĠMih": 48168, - "Ġrallies": 48169, - "Ġmanter": 48170, - "Ġearnest": 48171, - "super": 48172, - "Ġgece": 48173, - "ĠRend": 48174, - "ĠGerade": 48175, - "jenigen": 48176, - "ĠVall": 48177, - "ĠìŀĪëĤĺ": 48178, - "ĠÑģказала": 48179, - "Ġtrabalh": 48180, - "ĠнаÑĪем": 48181, - "ĠмеÑħ": 48182, - "ikit": 48183, - "Ġnouns": 48184, - "Ġneurological": 48185, - "Ġmotivational": 48186, - "ĠMcMahon": 48187, - "ĠFinished": 48188, - "Ġë³´ìĿ´": 48189, - "ĠFields": 48190, - "Ġadolescents": 48191, - "ĠTisch": 48192, - "ĠNeben": 48193, - "ĠFlowers": 48194, - "ĠEnerg": 48195, - "Ġdiret": 48196, - "ĠThi": 48197, - "ĠPicas": 48198, - "æĥľ": 48199, - "æĢİä¹Īæł·": 48200, - "Ġavete": 48201, - "ĠFors": 48202, - "ĠChapel": 48203, - "Não": 48204, - "Et": 48205, - "ĠÑģодеÑĢж": 48206, - "reno": 48207, - "Ġsven": 48208, - "ĠdostÄĻp": 48209, - "nee": 48210, - "ĠSnapdragon": 48211, - "ĠIDs": 48212, - "ìķĺëĬĶëį°": 48213, - "ר×ļ": 48214, - "Ġsunflower": 48215, - "Ġperpetual": 48216, - "ç³ĸ": 48217, - "Ġknights": 48218, - "Ġgird": 48219, - "ĠTold": 48220, - "Ġvolcanoes": 48221, - "Ġadversary": 48222, - "ĠEconomy": 48223, - "Ġextrapol": 48224, - "Ġbluetooth": 48225, - "Ġzooming": 48226, - "Ġskys": 48227, - "Ġgenial": 48228, - "ÃŃculos": 48229, - "ambre": 48230, - "ĠмеÑĢ": 48231, - "Ġteeny": 48232, - "Ġstressing": 48233, - "ìķĮ": 48234, - "ONY": 48235, - "Ġtranslucent": 48236, - "Ġrounding": 48237, - "Ġgrues": 48238, - "×Ļ׳×Ķ": 48239, - "après": 48240, - "Ġprueba": 48241, - "Ġpolygon": 48242, - "Ġblueberry": 48243, - "ĠProgramm": 48244, - "Ġtrenches": 48245, - "Ġsebagai": 48246, - "Ġpalate": 48247, - "Ġlaude": 48248, - "Ġbehaved": 48249, - "Ġlongitudinal": 48250, - "ĠModule": 48251, - "Ġadmir": 48252, - "λι": 48253, - "Greg": 48254, - "Ġwyst": 48255, - "Ġpropagate": 48256, - "Ġmolds": 48257, - "ĠTub": 48258, - "ĠLoud": 48259, - "usto": 48260, - "Ġunstoppable": 48261, - "Ġreinforcing": 48262, - "éĿŀ常çļĦ": 48263, - "ĠпÑĢоблема": 48264, - "Ġpotencial": 48265, - "Ġhemp": 48266, - "ìŀĶ": 48267, - "य": 48268, - "Ġoptic": 48269, - "Ġerfolgreich": 48270, - "ÑģÑĭ": 48271, - "олÑĮÑĪе": 48272, - "urst": 48273, - "ĠPois": 48274, - "Ġrespondents": 48275, - "Ġnehme": 48276, - "ĠExternal": 48277, - "olate": 48278, - "Hyun": 48279, - "Ġquartz": 48280, - "Ġmathematician": 48281, - "Ġbásicamente": 48282, - "Ġail": 48283, - "ìłľë¥¼": 48284, - "attutto": 48285, - "Ġnooit": 48286, - "Ġafflict": 48287, - "ĠOlga": 48288, - "èŃ·": 48289, - "ĠнаÑĤ": 48290, - "Ġdites": 48291, - "Ġrealidade": 48292, - "Ġkän": 48293, - "Ġuniqueness": 48294, - "Ġpadres": 48295, - "Ġsubsidi": 48296, - "Ġpigeons": 48297, - "βα": 48298, - "stad": 48299, - "Ġderen": 48300, - "ĠСлед": 48301, - "doo": 48302, - "ĠопиÑģании": 48303, - "Ġamber": 48304, - "Ġgoosebumps": 48305, - "ĠfrÃ¥gor": 48306, - "ĠVital": 48307, - "ĠIsraelites": 48308, - "wasser": 48309, - "Isn": 48310, - "Ġcommits": 48311, - "ĠSTEVEN": 48312, - "ĠBevölker": 48313, - "uitive": 48314, - "Ġlegen": 48315, - "Ġbruk": 48316, - "иÑĢован": 48317, - "ynen": 48318, - "helm": 48319, - "Ġgenerational": 48320, - "ĠLändern": 48321, - "οιÏĢÏĮν": 48322, - "uzu": 48323, - "Ġcaller": 48324, - "онÑĮ": 48325, - "ümü": 48326, - "Ġbesar": 48327, - "Ġplats": 48328, - "Ġmigrated": 48329, - "Ġjap": 48330, - "ĠWAR": 48331, - 
"Ġdissect": 48332, - "ĠZusch": 48333, - "ĠZeiten": 48334, - "ĠLions": 48335, - "ĠDF": 48336, - "âĶ": 48337, - "кив": 48338, - "Ġpedestrians": 48339, - "ĠMarilyn": 48340, - "dock": 48341, - "Ġyht": 48342, - "Ġreincarn": 48343, - "ĠSono": 48344, - "ĠGrowth": 48345, - "ÑĥÑģов": 48346, - "Ġdungeons": 48347, - "Ġbagus": 48348, - "kich": 48349, - "ĠÑĥкÑĢаÑĹ": 48350, - "éĨ«": 48351, - "ĠKeller": 48352, - "chemistry": 48353, - "Japanese": 48354, - "Ġwillst": 48355, - "Ġdecomposition": 48356, - "ĠÑģÑĤен": 48357, - "Ġrevived": 48358, - "íķĻêµIJ": 48359, - "ĠÅĵ": 48360, - "ä½IJ": 48361, - "ìĭ¸": 48362, - "ippy": 48363, - "Ġhourly": 48364, - "jän": 48365, - "ĠWorkshop": 48366, - "Ŀ¼ìĦľ": 48367, - "Ġcuarto": 48368, - "Ġpatrim": 48369, - "ĠBurch": 48370, - "ĠìŀĪ기": 48371, - "Ġhepat": 48372, - "ĠhÃłng": 48373, - "ĠëĮĢíķ´": 48374, - "ĠваÑĪи": 48375, - "Ġrework": 48376, - "Ġparse": 48377, - "Ġçıktı": 48378, - "ĠSax": 48379, - "ĠMongo": 48380, - "ĠAaah": 48381, - "ramble": 48382, - "DJ": 48383, - "Ġstabilized": 48384, - "ĠSpeech": 48385, - "Books": 48386, - "Ġhurdles": 48387, - "ĠWO": 48388, - "ĠLamborg": 48389, - "Ġ1933": 48390, - "Ġvorbere": 48391, - "Ġclinically": 48392, - "Ġbreathtaking": 48393, - "ĠGateway": 48394, - "пеÑĢвÑĭÑħ": 48395, - "uters": 48396, - "Ġë¹µ": 48397, - "Ġyeter": 48398, - "Ġpulley": 48399, - "Ġmuffin": 48400, - "ĠPrefer": 48401, - "ĠPence": 48402, - "Ġinformação": 48403, - "ìĬ¤íĬ¸ë": 48404, - "ãĤ¸ãĥ£": 48405, - "ĠTurtle": 48406, - "ĠRegina": 48407, - "ĠLoad": 48408, - "does": 48409, - "panze": 48410, - "¸Ķ": 48411, - "Ġmina": 48412, - "ĠLatinos": 48413, - "ammers": 48414, - "ĠTort": 48415, - "ĠBeyonce": 48416, - "имоÑģÑĤи": 48417, - "ĠвопÑĢоÑģÑĭ": 48418, - "Ġbulun": 48419, - "èĢĮå·²": 48420, - "inek": 48421, - "bereich": 48422, - "Ġpasture": 48423, - "ĠOA": 48424, - "ĠMelt": 48425, - "ĠEtt": 48426, - "ĠDY": 48427, - "Ġobwohl": 48428, - "Ġleagues": 48429, - "ÑĤеÑģÑĮ": 48430, - "ĠкÑĥÑģ": 48431, - "Ġvors": 48432, - "Ġtopp": 48433, - "ographical": 48434, - "asst": 48435, - "Ġlindo": 48436, - "Ġë°ĿíĺĶ": 48437, - "Ġréfl": 48438, - "Ġclimbs": 48439, - "Ġvarsa": 48440, - "Ġmethyl": 48441, - "ĠKarere": 48442, - "Æ°á»Ł": 48443, - "Rad": 48444, - "Ġpreparedness": 48445, - "онÑĩ": 48446, - "ĠOD": 48447, - "ĠCGI": 48448, - "Ġम": 48449, - "Ġspeechless": 48450, - "Ġlasci": 48451, - "Ġbolag": 48452, - "ĠÑħоÑĩеÑĤÑģÑı": 48453, - "Ġgrieving": 48454, - "ĠJohannes": 48455, - "ĠCarroll": 48456, - "adaki": 48457, - "Ī¬ë": 48458, - "ĠsÅĤu": 48459, - "Ġinnerhalb": 48460, - "Ġgymnastics": 48461, - "пÑĢи": 48462, - "ifiques": 48463, - "Ġkarate": 48464, - "Ġdomu": 48465, - "ãģĿãĤĮãģ§": 48466, - "OTHER": 48467, - "Ġdemandé": 48468, - "Ġbooklet": 48469, - "ĠKyoto": 48470, - "Ġwoh": 48471, - "ĠMarÃŃa": 48472, - "violent": 48473, - "JE": 48474, - "Ġlóg": 48475, - "Ġbrutally": 48476, - "cot": 48477, - "ĠÙħÛĮ": 48478, - "ĠWarsz": 48479, - "å®Ī": 48480, - "wol": 48481, - "Ġmikä": 48482, - "ĠPronounce": 48483, - "ĠBrendan": 48484, - "Ġroup": 48485, - "Ġitaliano": 48486, - "å¦ĤæѤ": 48487, - "ĠкомпÑĮÑİÑĤ": 48488, - "Ġurging": 48489, - "edes": 48490, - "Ġcarbono": 48491, - "ĠRichardson": 48492, - "ĠÐĿаÑĩ": 48493, - "ĠTrainer": 48494, - "ĠCrimea": 48495, - "Ġdiapers": 48496, - "Ġcovet": 48497, - "ĠMahar": 48498, - "ĠHutch": 48499, - "ĠAusw": 48500, - "berty": 48501, - "Ġindifferent": 48502, - "кÑĢеÑĤ": 48503, - "uldade": 48504, - "Ġharms": 48505, - "¢ÙĨ": 48506, - "lesia": 48507, - "Ġgio": 48508, - "ĠMistress": 48509, - "ĠKnox": 48510, - "ĠFREE": 48511, - "Ġ루ë": 48512, - "ĠнаÑĪа": 48513, - "Ġinvincible": 48514, - 
"Ġmaiden": 48515, - "ĠJeez": 48516, - "Ġbreve": 48517, - "pole": 48518, - "Ġcriticisms": 48519, - "ĠRusia": 48520, - "म": 48521, - "phin": 48522, - "ĠCompare": 48523, - "ĠBON": 48524, - "Ġsneaking": 48525, - "ĠRails": 48526, - "ĠGeral": 48527, - "Ġ1953": 48528, - "Hola": 48529, - "ĠопÑĭÑĤ": 48530, - "Ġrainforest": 48531, - "Ġbelum": 48532, - "ĠObi": 48533, - "ĠISS": 48534, - "ãĤĮãģªãģĦ": 48535, - "ĠСв": 48536, - "Ġblond": 48537, - "Ġwzgl": 48538, - "ĠpowiedziaÅĤ": 48539, - "Ġchoking": 48540, - "ĠSongs": 48541, - "ĠBiraz": 48542, - "Ġyells": 48543, - "Ġstylist": 48544, - "ÏĮÏĦε": 48545, - "Ġschreiben": 48546, - "ĠJaw": 48547, - "ĠEleven": 48548, - "ĠRif": 48549, - "/.": 48550, - "Ġìĺ¤ëŀľë§Į": 48551, - "Ġtreaties": 48552, - "uffed": 48553, - "ĠâĪĴ": 48554, - "Ġroofs": 48555, - "à¹Ģส": 48556, - "Ġë»": 48557, - "Ġsparkle": 48558, - "ĠKiev": 48559, - "ĠArgu": 48560, - "erecht": 48561, - "ĠÐĿадо": 48562, - "ĠFIL": 48563, - "Ġmolta": 48564, - "ĠDevi": 48565, - "Ġcampe": 48566, - "Ġbenevol": 48567, - "ĠTough": 48568, - "Ġmoim": 48569, - "Ġevacuate": 48570, - "Ġerrado": 48571, - "å©Ĩ": 48572, - "ÑĢÑĥго": 48573, - "Ġíİĺ": 48574, - "ĠÎĵια": 48575, - "Ġweaken": 48576, - "Ġilluminated": 48577, - "Ġsiglo": 48578, - "ĠVacc": 48579, - "ией": 48580, - "alis": 48581, - "ĠÑĥÑģÑĤÑĢой": 48582, - "Ġdona": 48583, - "ÅĤos": 48584, - "üman": 48585, - "Ġproducción": 48586, - "Ġclot": 48587, - "ĠMango": 48588, - "Ġuneasy": 48589, - "Ġshuts": 48590, - "ĠExamples": 48591, - "vell": 48592, - "ebe": 48593, - "Ġpromptly": 48594, - "ĠTeles": 48595, - "ĠпÑĢоÑĪл": 48596, - "Ġpuerta": 48597, - "Ġüberzeug": 48598, - "Ġcoch": 48599, - "social": 48600, - "ĠBenson": 48601, - "ĠMeth": 48602, - "ĠExped": 48603, - "Ġsupplemental": 48604, - "Ġconceive": 48605, - "Ġ×ĺ×ķ×ij": 48606, - "Ġcaptivity": 48607, - "ıĻìķĪ": 48608, - "ĠÑħÑĥд": 48609, - "forming": 48610, - "Ġuploads": 48611, - "Ġturbulence": 48612, - "joint": 48613, - "Ġsatisfactory": 48614, - "ĠAnime": 48615, - "Ġwashes": 48616, - "Ġliberals": 48617, - "ĠSunshine": 48618, - "ĠREAL": 48619, - "ublik": 48620, - "binary": 48621, - "Tony": 48622, - "Ġpolarized": 48623, - "Ġenriched": 48624, - "taking": 48625, - "ĠëģĿëĤĺ": 48626, - "Ġpleasures": 48627, - "Ġextermin": 48628, - "inese": 48629, - "atl": 48630, - "vär": 48631, - "аÑĢÑĭ": 48632, - "ĠmyÅĽ": 48633, - "narrator": 48634, - "Ġодном": 48635, - "ĠnajwiÄĻ": 48636, - "Ġmobilize": 48637, - "Ġmillor": 48638, - "Ġata": 48639, - "æ··": 48640, - "ĠpolÃŃtico": 48641, - "Ġplead": 48642, - "Ġpainters": 48643, - "ĠSow": 48644, - "оÑĦ": 48645, - "ĠìĺĽëĤł": 48646, - "ĠÑĩÑĤоб": 48647, - "Ġsabor": 48648, - "ĠUndert": 48649, - "ĠJERRY": 48650, - "Å¡ÃŃ": 48651, - "Ġë°ĸìĹIJ": 48652, - "Ġprécéd": 48653, - "Ġannotation": 48654, - "ĠInaudible": 48655, - "Ġtextured": 48656, - "Ġfisherman": 48657, - "vordan": 48658, - "icherung": 48659, - "ĠìłģìĿ´": 48660, - "Ġgezeigt": 48661, - "Ġmandates": 48662, - "Ġbeak": 48663, - "ĠTWO": 48664, - "ĠAkbar": 48665, - "ilian": 48666, - "Ġtiếp": 48667, - "Ġsuperiority": 48668, - "inku": 48669, - "Ġlys": 48670, - "ĠFCC": 48671, - "ĠCPA": 48672, - "ustering": 48673, - "nicos": 48674, - "anja": 48675, - "Ġchills": 48676, - "ĠCage": 48677, - "Ġsealing": 48678, - "Ġsaç": 48679, - "Ġdedans": 48680, - "ĠAlger": 48681, - "Ġspezie": 48682, - "Ġcoloss": 48683, - "ıyı": 48684, - "clockwise": 48685, - "Ġexactamente": 48686, - "Ġiemand": 48687, - "amı": 48688, - "Ġmandar": 48689, - "raj": 48690, - "faced": 48691, - "agua": 48692, - "Ġê¹Ķë": 48693, - "Ġinsbesondere": 48694, - "Ġdrizzle": 48695, - "Ġdiminish": 48696, - "ĠYoda": 
48697, - "AI": 48698, - "Ġbilmiyorum": 48699, - "ĠMMA": 48700, - "ategory": 48701, - "ĠпеÑĢеп": 48702, - "Ġparticipar": 48703, - "Ġnormalized": 48704, - "Ġcomplexities": 48705, - "æ´²": 48706, - "æݧ": 48707, - "аÑĢов": 48708, - "mist": 48709, - "icha": 48710, - "Group": 48711, - "Ġresiliency": 48712, - "Ġnogle": 48713, - "ĠCNC": 48714, - "prü": 48715, - "Ġphysicists": 48716, - "нок": 48717, - "LI": 48718, - "Ġstuffs": 48719, - "Ġsistemas": 48720, - "Ġinterfering": 48721, - "ĠMarvin": 48722, - "ército": 48723, - "ĠìĹĨê³ł": 48724, - "Ġsonic": 48725, - "Ġequiv": 48726, - "Ġabord": 48727, - "ĠRamen": 48728, - "Ġ09": 48729, - "medim": 48730, - "atiques": 48731, - "ĠделаÑİÑĤ": 48732, - "Ġunanimously": 48733, - "Ġskirts": 48734, - "ĠíĬ¹ë³Ħ": 48735, - "ĠPrix": 48736, - "kami": 48737, - "Ġfruition": 48738, - "Ġbirthdays": 48739, - "иком": 48740, - "Ġinaugural": 48741, - "Ġcorrelate": 48742, - "ĠTory": 48743, - "ĠëĤĺìģ": 48744, - "Ġdew": 48745, - "ĠPrecis": 48746, - "ihi": 48747, - "Ġë¬¸ìłľê°Ģ": 48748, - "Ġciting": 48749, - "ĠLana": 48750, - "ĠKag": 48751, - "Ġplaythrough": 48752, - "ĠProtocol": 48753, - "frist": 48754, - "hovah": 48755, - "Ġmerciful": 48756, - "Ġbilingual": 48757, - "ĠGuitar": 48758, - "rh": 48759, - "Ġglamorous": 48760, - "ĠVikings": 48761, - "ĠOoooh": 48762, - "íķĺëĬĶëį°": 48763, - "ĠUganda": 48764, - "Ġcollapses": 48765, - "entry": 48766, - "Ġantioxidants": 48767, - "ëĤĺë": 48768, - "ÑĪаÑı": 48769, - "Ġtrivia": 48770, - "Ġgäller": 48771, - "Ġfungi": 48772, - "Ġmilks": 48773, - "Ġdicht": 48774, - "μη": 48775, - "poke": 48776, - "ĠвÑĭпÑĥÑģк": 48777, - "Ġfeeder": 48778, - "ĠAlcohol": 48779, - "hower": 48780, - "Ġdeserving": 48781, - "ĠRebel": 48782, - "iosis": 48783, - "Ġ103": 48784, - "Ġhandout": 48785, - "Ġenm": 48786, - "Ġlandlords": 48787, - "Ġgeology": 48788, - "rils": 48789, - "Ġcobra": 48790, - "ĠVold": 48791, - "ĠPanch": 48792, - "ĠGREG": 48793, - "Ġpross": 48794, - "Ġbracelets": 48795, - "ĠVega": 48796, - "Ġrozum": 48797, - "款": 48798, - "азд": 48799, - "ĠLynd": 48800, - "ĠHonors": 48801, - "Ġsurrendered": 48802, - "Ġlibrarians": 48803, - "125": 48804, - "ĠÑģиг": 48805, - "Ġuniformly": 48806, - "ĠEagles": 48807, - "ìķĻ": 48808, - "иÑĤан": 48809, - "andid": 48810, - "ĠìłĪëĮĢ": 48811, - "Ġض": 48812, - "Ġarrests": 48813, - "ĠCSV": 48814, - "ĠAzerbaijan": 48815, - "ortic": 48816, - "ĠDX": 48817, - "ĠAdventures": 48818, - "Ġabus": 48819, - "ĠFau": 48820, - "Ġschlimm": 48821, - "Ġrattling": 48822, - "Ġconsumes": 48823, - "ĠTolkien": 48824, - "Ġresurrected": 48825, - "ĠXY": 48826, - "íĬ¸ê°Ģ": 48827, - "ĠвÑĭÑģÑĤÑĥп": 48828, - "ĠAngie": 48829, - "żenia": 48830, - "Mic": 48831, - "ĠSheila": 48832, - "achtet": 48833, - "Ġoverst": 48834, - "Ġlâ": 48835, - "Ġineffective": 48836, - "æĿ¡": 48837, - "æĢİä¹ĪäºĨ": 48838, - "å¿Ļ": 48839, - "Ġwichtiger": 48840, - "Ġvino": 48841, - "Ġpum": 48842, - "Ġangled": 48843, - "ĠPione": 48844, - "ĠMỹ": 48845, - "ãģĿãĤĮãģ¯": 48846, - "woÅĽÄĩ": 48847, - "draw": 48848, - "ัà¹Ī": 48849, - "markets": 48850, - "Ġcafes": 48851, - "ĠCem": 48852, - "âĿ¤": 48853, - "ĠSuit": 48854, - "MK": 48855, - "Ġemphasizes": 48856, - "Ġtortilla": 48857, - "Ġmejorar": 48858, - "ĠSurviv": 48859, - "casting": 48860, - "Ġeducación": 48861, - "ĠGum": 48862, - "uely": 48863, - "ĠìĹ¬ê¸°ëĬĶ": 48864, - "Ġstretchy": 48865, - "ença": 48866, - "Ġwithhold": 48867, - "Ġexiting": 48868, - "Ġenthalpy": 48869, - "ĠTransit": 48870, - "ılmÄ±ÅŁ": 48871, - "alies": 48872, - "Ġsalvar": 48873, - "Ġleaned": 48874, - "ĠgroÃŁes": 48875, - "Ġfitt": 48876, - "аки": 48877, - "Sarah": 48878, - "Ġhostel": 
48879, - "Ġfingerna": 48880, - "ĠnadziejÄĻ": 48881, - "wives": 48882, - "Rec": 48883, - "Ġspool": 48884, - "аÑĤов": 48885, - "ĠEnemy": 48886, - "Ġfury": 48887, - "Ġdetta": 48888, - "ĠFay": 48889, - "éļ¨": 48890, - "ÑıÑİÑĤ": 48891, - "Ġaproximadamente": 48892, - "Ġsilos": 48893, - "Ġmagist": 48894, - "Ġcree": 48895, - "ĠKrank": 48896, - "ĠDOWN": 48897, - "Ġstartled": 48898, - "Ġreborn": 48899, - "ĠUmwelt": 48900, - "ĠSuzanne": 48901, - "ниÑĨÑĭ": 48902, - "outez": 48903, - "ĠJAC": 48904, - "yards": 48905, - "radas": 48906, - "rau": 48907, - "ipts": 48908, - "hail": 48909, - "Ġparagraphs": 48910, - "Ġmeglio": 48911, - "Ġisolating": 48912, - "Ġaceite": 48913, - "ĠHarsh": 48914, - "Ġcyst": 48915, - "ĠBlockchain": 48916, - "ĠÑħоÑĢоÑĪий": 48917, - "Ġvirtuous": 48918, - "Ġinvestigación": 48919, - "Ġdevoir": 48920, - "Ġmasturb": 48921, - "ĠSale": 48922, - "ÙĬرة": 48923, - "ĠΧ": 48924, - "ĠStraÃŁen": 48925, - "Ġdikk": 48926, - "Ġafore": 48927, - "ĠJungkook": 48928, - "Ġchociaż": 48929, - "ĠDebatte": 48930, - "Ġweirdly": 48931, - "Ġviaje": 48932, - "regist": 48933, - "Help": 48934, - "Ġkinderen": 48935, - "Ġformulated": 48936, - "Ġenfim": 48937, - "ĠTowards": 48938, - "коÑĹ": 48939, - "ivering": 48940, - "ĠдеÑĤи": 48941, - "charger": 48942, - "Ġpurl": 48943, - "Ġacademically": 48944, - "ĠNurse": 48945, - "Ġdeleting": 48946, - "ayo": 48947, - "Ġrefusal": 48948, - "Ġdepicts": 48949, - "ĠDracula": 48950, - "Ġtoasted": 48951, - "ĠZombie": 48952, - "ĠSuperior": 48953, - "ĠBold": 48954, - "Ġquizzes": 48955, - "Ġgle": 48956, - "450": 48957, - "Ġcomeço": 48958, - "ynn": 48959, - "Ġverst": 48960, - "ĠOlaf": 48961, - "Ġpomoc": 48962, - "ĠSask": 48963, - "ëĺ": 48964, - "ĠTCP": 48965, - "ĠProperty": 48966, - "íķĺì£ł": 48967, - "à¸ľà¸¡": 48968, - "boom": 48969, - "aros": 48970, - "ĠÑĢоÑģÑģий": 48971, - "ĠбÑĭваеÑĤ": 48972, - "åĩºåİ»": 48973, - "ĠìĿ´ìķ¼ê¸°ë¥¼": 48974, - "Ġcombien": 48975, - "vacc": 48976, - "Ġebenfalls": 48977, - "para": 48978, - "Ġзм": 48979, - "Ġdesperation": 48980, - "ordre": 48981, - "Ġש׾×Ļ": 48982, - "Ġgenerously": 48983, - "ĠÐŀк": 48984, - "Ġorbiting": 48985, - "> >", - "r u", - "w n", - "on t", - "i b", - "e ll", - "Ġs m", - "ot h", - "u al", - "Ġ >>", - "Ġp h", - "l es", - "o c", - "f ul", - "Ġse c", - "is e", - "Ġad d", - "ig h", - "er t", - "Ġs ame", - "â Ģ", - "Ġme an", - "Ġf ind", - "e k", - "Ġen d", - "- -", - "Ð ¼", - "Ġst ill", - "a z", - "Ġ '", - "Ġm in", - "Ġye ars", - "ur n", - "Ġar ound", - "sel f", - "Ġw r", - "b s", - "oug ht", - "ĠâĻ ª", - "Ġf l", - "an ge", - "Ġa fter", - "Ġpo int", - "m er", - "v ed", - "Ġl ong", - "o y", - "ä ¸", - "Ġc r", - "way s", - "Ġs y", - "Ġt ra", - "Ġ2 0", - "a ve", - "Ġch e", - "Ġ ent", - "Ġbe fore", - "p h", - "Ġat t", - "i an", - "i ly", - "Ġpers on", - "Ġb ig", - "Ġs ch", - "Ġre al", - "Ġne xt", - "Ġlo ve", - "Ġvide o", - "ĠL et", - "Ġf in", - "Ġma k", - "i ble", - "Ġto day", - "er m", - "ĠA l", - "ow er", - "an n", - "i x", - "Ġp ar", - "Ġst ud", - "à ¶", - "Ġimp ort", - "t e", - "Ġg ive", - "v es", - "Ġd ie", - "Ġde c", - "Ġte ll", - "ĠÐ º", - "Ñģ ÑĤ", - "Ġwh y", - "ic ally", - "ic t", - "re d", - "Ġb as", - "Ġsu re", - "Ġbe l", - "at ing", - "Ġt ak", - "Ġs et", - "Ġl ife", - "Ġdid n", - "Ø §", - "o b", - "u nd", - "at h", - "Ġo p", - "ĠÐ ¾", - "a it", - "Ġwor ld", - "Ġsu pp", - "i o", - "Ġc our", - "ĠÐ ¸", - "w ard", - "е н", - "Ġal ways", - "u p", - "Ġha nd", - "ĠH ow", - "ci al", - "Ġcon s", - "Ġ Ñ", - "Ġin d", - "Ġ 4", - "ĠA s", - "Ġf un", - "j ect", - "Ġimport ant", - "Ġs ur", - "e w", - "at es", - "Ġ 5", - "Ġd i", - "Ġm ade", - "Ġin s", 
- "Ġas k", - "Ġ et", - "Ġn um", - "Ġc ar", - "ĠO kay", - "Ġs im", - "i k", - "Ġl ast", - "ĠG o", - "Ġm us", - "Ġre l", - "ul ar", - "´ ì", - "ĠWe ll", - "pe ct", - "ĠTh ank", - "Ġth ree", - "à £", - "ã ĥ", - "Ġin v", - "Ġg en", - "l ic", - "Ġhapp en", - "ë Ĭ", - "i en", - "e ver", - "оР²", - "Ġst r", - "ĠA ll", - "Ġin st", - "Ġâ Ģ", - "Ġde f", - "Ġs l", - "Ġm ight", - "un g", - "Ġye ar", - "Ġo wn", - "Ġke ep", - "b ody", - "d er", - "Ġ ÑĤ", - "ĠÐ ´", - "Ġan other", - "Ġm od", - "Ġe v", - "Ġgu ys", - "Ġab le", - "ã o", - "qu e", - "id ent", - "ĠY es", - "Ġit s", - "Ġpl ace", - "Ġpro du", - "ar n", - "ĠÐ ¼", - "Ġre p", - "Ġex per", - "Ġf am", - "it ies", - "if ic", - "Ġh igh", - "i ed", - "o ol", - "ie w", - "е ÑĤ", - "re n", - "Ġdon e", - "Ġ ...", - "ëĬ Ķ", - "st em", - "ĠS e", - "Ġbet ter", - "c ome", - "Ġd el", - "Ġt y", - "Ġu m", - "Ġh o", - "ĠA n", - "Ġm on", - "ing s", - "Ġs k", - "Ġo b", - "c om", - "ble m", - "op e", - "st and", - "' d", - "ment s", - "Ġe le", - "ĠI s", - "Ġd a", - "Ġre g", - "le ase", - "i ke", - "al s", - "iz e", - "ê °", - "Ġc are", - "Ġne ver", - "ìĿ ´", - "es e", - "Ġm et", - "ol og", - "ĠWh en", - "u ck", - "е ÑĢ", - "Ġ é", - "Ġd at", - "à §", - "Ġex am", - "il ity", - "Ġd et", - "c ri", - "Ġus ed", - "ĠD o", - "Ġtr ans", - "e g", - "t en", - "Ñ İ", - "c us", - "Ġsec ond", - "Ġb est", - "Ġh ard", - "Ġ ide", - "Ġpro blem", - "ê ³", - "ĠU n", - "Ñ ħ", - "Ġ Î", - "Ġw atch", - "ĠS h", - "at ter", - "Ġpre t", - "Ġd er", - "Ġcour se", - "Å Ł", - "at ive", - "ic s", - "Ġquest ion", - "ut e", - "ì Ĺ", - "ĠF or", - "at her", - "Ġc ol", - "i end", - "Ġ í", - "Ġ Z", - "Ġdoes n", - "ar ch", - "Ġinter est", - "Ġp ol", - "Ġc or", - "i ence", - "Ġp res", - "Ġe ach", - "Ġsy stem", - "Ġf act", - "i el", - "ab ly", - "Ġ er", - "Ġr un", - "Ġì Ŀ", - "Ġto p", - "n er", - "Ġth ought", - "Ġe as", - "i ent", - "Ġc re", - "Ñ Ī", - "Ġcomm un", - "y e", - "re ady", - "ll ow", - "Ġevery thing", - "om m", - "Ġm ed", - "ļ Ķ", - "Ġc ount", - "it s", - "Ġcom pl", - "h ip", - "Ù Ħ", - "o ok", - "Ġto get", - "Ġtoget her", - "am p", - "Ġg ame", - "Ġal ready", - "аР»", - "Ġcall ed", - "al e", - "Å Ĥ", - "ĠM y", - "Ġunder stand", - "Ġd r", - "Ġm om", - "it ed", - "оР»", - "Ġus ing", - "z y", - "Ġnum ber", - "ãĢ ģ", - "c ed", - "Ġc le", - "н о", - "ëĭ ¤", - "in ce", - "Ġlook ing", - "Ġpret ty", - "Ġpro b", - "ĠS he", - "Ġ ve", - "Ġget ting", - "Ġwe ek", - "Ġe ff", - "u ff", - "a ir", - "u es", - "er n", - "Ġ Q", - "ou p", - "ent ion", - "Ġs ide", - "оР¼", - "Ġfor m", - "Ġb us", - "Ġas s", - "Ġ ed", - "as on", - "we en", - "âĢ ¦", - "Ġt urn", - "Ġc ur", - "Ġco ll", - "Ġd ire", - "ĠG od", - "Ġ1 0", - "Ġe qu", - "ĠÐ ±", - "Ġop en", - "Ġsu ch", - "ir d", - "аРº", - "Ġe ar", - "Ä Ļ", - "g an", - "Ġpart ic", - "Ġfr iend", - "Ġex p", - "Ġex t", - "Ġh ome", - "Ġw ater", - "ĠO n", - "ÑĤ ÑĮ", - "or k", - "Ġп ÑĢ", - "Ġmo ve", - "n ess", - "en se", - "h o", - "Ġch ar", - "c o", - "in s", - "Ġb oth", - "Ġ1 9", - "Ġg ra", - "Ġbet ween", - "á »", - "Ġì ķ", - "as h", - "ĠR e", - "a i", - "al th", - "u res", - "em ber", - "Ġa v", - "Ġ ver", - "à ª", - "one y", - "Ġth ank", - "Ġmay be", - "u c", - "im e", - "ê³ ł", - "Ġa way", - "Ġn ame", - "ou se", - "Ġac c", - "Ġmus ic", - "Ġch ange", - "Ġp ass", - "g er", - "Ġbu ild", - "Ġv al", - "in ess", - "an y", - "Ġfe w", - "´ ë", - "t a", - "Ġl ist", - "à ¥", - "Ġo ld", - "Ġì ŀ", - "Ġs ort", - "Ġme m", - "Ġc a", - "ce pt", - "Ġgen er", - "Ġye ah", - "Ġwh ile", - "Ġany thing", - "r ic", - "gr am", - "Ġe in", - "c y", - "ur ing", - "ĠD e", - "Ġp ower", - "Ġcom ing", - "Ġwor 
d", - "Ġ- -", - "Ġbel ie", - "Ġf ound", - "t o", - "Ð ¿", - "Ġme ans", - "Ġin form", - "Ġ Ø", - "Ġ Ñĩ", - "Ġsm all", - "00 0", - "Ġc ame", - "Ġ íķ", - "w h", - "Ġwork ing", - "Ġexam ple", - "Ġp os", - "Ġde p", - "ê ²", - "ä º", - "ot e", - "Ġde m", - "ì §", - "t s", - "Ġv ar", - "a ut", - "Ġt ri", - "ch n", - "Ġhe ad", - "Ġwho le", - "× Ļ", - "z e", - "Ġtry ing", - "Ġt em", - "Ġc ou", - "et s", - "Ġ 6", - "Ġf il", - "vel op", - "Ġc ase", - "à ¯", - "Ġprob ably", - "Ġo kay", - "Ġpl an", - "Ġs it", - "Ġsch ool", - "ĠTh en", - "¸ ë", - "m e", - "Ġpro cess", - "Ġf ar", - "Ġre ad", - "Ġp oss", - "Ġb re", - "Ġso l", - "ic ht", - "Ġsupp ort", - "ĠT o", - "ert ain", - "Ġstart ed", - "Ġc ap", - "Ġle ft", - "Ġdat a", - "Ġtim es", - "еР»", - "Ġwant ed", - "а н", - "Ġtalk ing", - "Ġis t", - "Ġha ving", - "um p", - "Ġcont in", - "Ġsu b", - "ĠÐ ·", - "p r", - "ëĭ Ī", - "in a", - "Å ¼", - "Ġc reat", - "od e", - "× ķ", - "æ ĺ", - "! !", - "Ġt erm", - "is m", - "оР´", - "ĠBe cause", - "Ġw ent", - "id er", - "Ġpro v", - "Ġch ild", - "Ġd en", - "Ġl ight", - "b r", - "³ о", - "o h", - "Ġbo ok", - "Ġ Ù", - "ut ion", - "ĠJ ust", - "en e", - "Ġf our", - "Ġv is", - "ê° Ģ", - "Ġh ope", - "Ġmak ing", - "ĠL e", - "ì ķ", - "Ġo pp", - "a u", - "Ġm oney", - "Ġpro gram", - "à ¨", - "Ġst and", - "I N", - "Ġs ign", - "Ġle arn", - "à ł", - "ĠD on", - "Ġte am", - "Ġн а", - "l ud", - "Ġre st", - "ic es", - "æ ľ", - "Ġ ÑĢ", - "Ġa ut", - "Ġle ad", - "ation al", - "d e", - "g y", - "Ġn ice", - "Ġd as", - "Ġd ist", - "Ġh um", - "ĠO ne", - "æ Ī", - "Ġcom es", - "Ġj o", - "Ġc ent", - "Ġex pl", - "Ġm ark", - "re en", - "l ed", - "g in", - "ì ļĶ", - "Ġle vel", - "Ġcon f", - "us h", - "Ġde velop", - "Ġt est", - "en g", - "v ious", - "at ure", - "еР¼", - "re t", - "Ġj e", - "Ġst uff", - "Ġcl ass", - "ow s", - "Ġê ·", - "Ġs i", - "Ġl es", - "ro p", - "ç ļ", - "Ġp or", - "Ġw ar", - "ìĹ IJ", - "Ġevery one", - "Ġg e", - "Ġche ck", - "ot t", - "Ġs ing", - "Ġar t", - "Ġfo llow", - "Ġ20 1", - "ĠF r", - "a is", - "ì ĸ", - "Î ±", - "å °", - "Ġà ł", - "im es", - "Ġre t", - "Ġch ang", - "Ġp ub", - "Ġin f", - "Ġte chn", - "ad a", - "iv es", - "Ġbe h", - "æĺ ¯", - "Ġlook s", - "ãĢ Ĥ", - "Ð ·", - "ĠWh y", - "çļ Ħ", - "Ġen ough", - "Ġb ra", - "it ch", - "ä »", - "Ġad v", - "Ð ±", - "Ġwith out", - "w er", - "mer ic", - "d en", - "Ġcompl et", - "Ġide a", - "ter s", - "o ck", - "Ġdef in", - "Ġe ver", - "Ġg l", - "Ġon ce", - "Ġbr ing", - "Ġsay ing", - "Ġan s", - "Ġhe ar", - "n ect", - "Ġl ess", - "g o", - "re am", - "ad o", - "ì ŀ", - "Ġm ind", - "ent e", - "Ġf ull", - "Ġb ad", - "Ġw om", - "Ġsome one", - "Ġd u", - "Ġw on", - "Ġcont ro", - "ort un", - "Ġhe alth", - "Ġch o", - "ĠA r", - "Ġcon c", - "Ġinform ation", - "Ġst op", - "at t", - "at ely", - "ä ½", - "Ġgr oup", - "Ġ Ñĥ", - "Ġqu ite", - "Ġres p", - "E R", - "ug ht", - "ê ¸", - "m an", - "iz ed", - "ĠB r", - "Ġrem ember", - "Ġfam ily", - "Ġbus iness", - "a w", - "Ġspe c", - "Ġa u", - "ĠO r", - "Ä ħ", - "Ġse en", - "Ġl ar", - "Ġ 7", - "g g", - "b ers", - "Ġd ra", - "Ġmon th", - "Ġsay s", - "Ġis s", - "Ġli ve", - "Ġl ine", - "Ġmom ent", - "Ġex c", - "el s", - "Ġs ound", - "Ġco ol", - "Ġlo c", - "Ġc ertain", - "Ġd ri", - "о ÑĤ", - "am es", - "Ġm ust", - "n y", - "и ÑĤ", - "Ġk id", - "Ġinc lud", - "ìĿ Ħ", - "at or", - "Ä Ł", - "h a", - "are d", - "Ġse em", - "Ð ¹", - "ì Ħ", - "Ġel se", - "Ġì ł", - "ir l", - "Ġ 8", - "Ġv o", - "Ġquest ions", - "in es", - "e e", - "æĪ ij", - "ü r", - "ĠA meric", - "Ġst ory", - "Ġser v", - "ver n", - "ag es", - "l and", - "ĠâĢ ĵ", - "er a", - "ĠC an", - "Ġp op", - "et 
her", - "Ġn a", - "Ġor der", - "Ġmak es", - "Ġs ince", - "c on", - "ct or", - "Ġth ough", - "Ġprodu ct", - "л и", - "Ġle g", - "Ġme et", - "al f", - "Ñģ Ñı", - "un ch", - "it er", - "o ve", - "×ķ ×", - "i et", - "аР¼", - "it al", - "Ġsu per", - "l ing", - "Ġp ay", - "Ġpar a", - "Ġj ob", - "ĠH ere", - "Ġs w", - "k s", - "pt ion", - "m a", - "Ġbelie ve", - "¬ ë", - "Ġw ait", - "оР¹", - "Ġun t", - "Ġqu ick", - "h r", - "ĠÑ į", - "ĠP ro", - "Ġm en", - "à ¹", - "Ġday s", - "Ġgo es", - "Ġspe ak", - "ĠA t", - "em ent", - "Ġm iss", - "Ġa w", - "Ġdes ign", - "Ġpro ject", - "о ÑĢ", - "i j", - "ant s", - "at s", - "ĠCh r", - "Ġ 9", - "Ġc ut", - "Ġre qu", - "Ġн е", - "ĠN ot", - "as ter", - "Ġm ill", - "Ġpartic ular", - "Ġp ie", - "Ġstud ents", - "Ġf ive", - "ou n", - "ĠN e", - "Ġg i", - "Ġp as", - "Ġf ree", - "ĠS p", - "l ich", - "Ġpro f", - "Ġen g", - "Ġpr ot", - "ĠL ike", - "os ed", - "Ġcon nect", - "a pp", - "Ġë §", - "it ing", - "Ġb lo", - "Ġl os", - "ist s", - "Ġexper ience", - "re nt", - "Ġst ay", - "Ġfo od", - "t on", - "ru ct", - "Ġh ist", - "v iew", - "in ing", - "m ost", - "i vers", - "b o", - "ãģ Ħ", - "ĠT r", - "g en", - "Ġp lease", - "Ġcommun ity", - "Ġc e", - "A N", - "n o", - "Ġb ody", - "Ġh our", - "Ġ vers", - "á º", - "c er", - "Ġê °", - "Ġre ason", - "ĠR ight", - "Ġl ater", - "Ï Ħ", - "Ġh ouse", - "Ġ X", - "оР½", - "Ġst ate", - "f ic", - "å ¤", - "Å Ľ", - "iel d", - "Ġp ri", - "Ġp ast", - "Ġw alk", - "olog y", - "er ing", - "an na", - "Ġt er", - "Ġho ld", - "Ġor gan", - "b en", - "Î ¿", - "ó n", - "Ġeff ect", - "Ġyour self", - "Ġpl us", - "a j", - "and o", - "ur al", - "Ġro om", - "le ct", - "ê² Į", - "? \"", - "s ide", - "Ġbe come", - "Ñ Ĩ", - "Ġ Â", - "o od", - "Ġcon st", - "Ġn ight", - "ut es", - "Ð ¶", - "Ġbre ak", - "Ġp ain", - "Ġst ep", - "ire d", - "Ġnot hing", - "Ġunt il", - "Ñ ĸ", - "аР²", - "Ù Ĭ", - "Ġd uring", - "ì§ Ģ", - "l ess", - "o ll", - "н Ñĭ", - "Î ¹", - "f ect", - "i ver", - "ı Ħ", - "ith er", - "y ing", - "Ġbe gin", - "×Ļ ×", - "iv id", - "Ġà §", - "Ġs al", - "Ġt a", - "Ġp ot", - "Ġ $", - "Ġm ar", - "Ġcle ar", - "Ġf ace", - "Ġgr ow", - "Ġ *", - "Ġins ide", - "Ġfriend s", - "Ġle ave", - "en n", - "Ġeas y", - "Ġare a", - "al ity", - "ou d", - "Ġe at", - "Ù Ĩ", - "Ġp ur", - "or n", - "Ġsa w", - "Ġans wer", - "Ġfr ont", - "Ġbe aut", - "¼ ë", - "Ġm atter", - "Ġs on", - "ĠN ew", - "Ġres ult", - "id es", - "ch e", - "Ġf ut", - "p s", - "Ġfo cus", - "Ġinterest ing", - "å ¥", - "Ġa p", - "\" .", - "Ġcre ate", - "о Ñģ", - "Ġp ress", - "r oss", - "Ġp ick", - "l ine", - "Ġto ok", - "ĠM ay", - "r ow", - "Ġ ich", - "ĺ ë", - "Ġre f", - "Ġm or", - "r act", - "are nt", - "A R", - "Ġex act", - "Ġsp ace", - "w ork", - "н и", - "Ġb ir", - "Ġde v", - "Ð ³", - "Ġto ld", - "Ġpub lic", - "ci ally", - "Ġv iew", - "ĠHe y", - "m ed", - "ll o", - "c c", - "Ġf ac", - "Ġcou ple", - "Ġhe art", - "l er", - "Ġre ady", - "Ġal most", - "ar ing", - "Ġh alf", - "ĠM e", - "av or", - "i que", - "Ġchar ac", - "Ġpr act", - "O N", - "an e", - "Ġ il", - "н а", - "Ġv i", - "l ish", - "he ad", - "Ġle ast", - "Ġbas ically", - "as ed", - "r ight", - "Ġy et", - "Ġtak ing", - "Ġcount ry", - "Ġw in", - "Ġis n", - "Ġposs ible", - "Ġc am", - "Ġinc re", - "Ġp at", - "Ġw anna", - "Ġcons ider", - "Ġab s", - "Ġwith in", - "Ġhum an", - "Ġthink ing", - "Ġo h", - "¡ ľ", - "Ġqu i", - "as es", - "Ġ 0", - "it ely", - "ä¸ į", - "Ġk ill", - "Ġm il", - "Ġinv est", - "is ter", - "Ġsu c", - "ion al", - "el f", - "Ġwh ether", - "Ġcontro l", - "Ġagain st", - "ot s", - "ëĭĪ ëĭ¤", - "i or", - "Ġpres ent", - "Ġ ا", - "Ġwatch ing", - 
"u be", - "er v", - "Ġn icht", - "Ġgo vern", - "ĠTh ese", - "Ġ :", - "u it", - "ug h", - "Ġwork s", - "o o", - "Ġw ir", - "Ġa ir", - "ĠT e", - "аР·", - "is ion", - "wh ere", - "Ġto t", - "j oy", - "ì ĭ", - "Ġv ol", - "ĠÐ µ", - "Ġcl ose", - "ĠA d", - "Ñ ī", - "in ed", - "Ġun a", - "Ġê· ¸ë", - "° ë", - "or ry", - "Ġb ro", - "Ġfil m", - "if t", - "2 0", - "Ġty pe", - "Ġhappen ed", - "ĠA m", - "Ġg irl", - "ĠA re", - "ward s", - "Ġp our", - "Ġcol or", - "el t", - "а Ñģ", - "Ġs ense", - "le x", - "ĠW ith", - "us s", - "ri b", - "Ġre se", - "Ġn orm", - "Ġfut ure", - "Ġde al", - "end ing", - "e y", - "Ġ x", - "er o", - "ĠC l", - "u k", - "Ġwhat ever", - "sel ves", - "Ġyou ng", - "ì Ĭ", - "ĠM ar", - "ĠChr ist", - "Ġgu ess", - "Ġper form", - "Ġen er", - "r on", - "Ġh it", - "Ġw ond", - "Ġdire ct", - "ĠE very", - "Ġof ten", - "Ġf a", - "Ġal ong", - "Ġcl ick", - "ĠL ook", - "Ġsit u", - "Ġhapp y", - "e ad", - "Ġag o", - "Ġen c", - "Ġmy self", - "Ġco ver", - "оР±", - "Ġm id", - "Ġc ost", - "Ġt en", - "ĠS ch", - "Ġex pect", - "Ġwas n", - "Ġstr ong", - "if ul", - "Ġopp ortun", - "in al", - "y le", - "Ġsh are", - "Ġtr ue", - "Ġapp ro", - "Ġch all", - "Ġmin utes", - "Ġch ann", - "Ġë Ĥ", - "Î µ", - "l i", - "Ġm ess", - "or ies", - "pe cially", - "Ġwr ong", - "Ġy es", - "Ġì Ĺ", - "ir on", - "Ġall ow", - "Ġsu bs", - "Ġf ore", - "Ġf ight", - "Ġso cial", - "Ġc ra", - "an a", - "Ġa ff", - "Ġ ess", - "Ġway s", - "Ġsh ort", - "Ġf all", - "Ġla w", - "ĠWh o", - "Ġen joy", - "Ġc al", - "Ġac cess", - "f e", - "Ġn on", - "Ġac ross", - "er y", - "vious ly", - "ĠE x", - "id ed", - "Ġl ink", - "ĠP r", - "Ġterm s", - "ac es", - "Ġl and", - "az ing", - "Ġ1 5", - "Ġm ult", - "Ġspe cial", - "å Ģ", - "iv ing", - "ìĿ Ģ", - "Ġty p", - "Ġst e", - "Ġ Ä", - "Ġfor ward", - "å ı", - "Ġf re", - "å¥ ½", - "Ġrese arch", - "௠į", - "а ÑĤ", - "Ġma in", - "Ġrec ord", - "Ġh u", - "Ġdefin itely", - "Ġe ither", - "Ġlist en", - "Ġke y", - "Ġmark et", - "ĠÑĩ ÑĤо", - "iz ation", - "Ġvide os", - "Ġgu y", - "Ġf ig", - "Ġst ra", - "ĠP l", - "ull y", - "am os", - "Ġm ention", - "Ġs ong", - "Ġinter n", - "r al", - "ur s", - "Ġh on", - "Ġval ue", - "Ġb ar", - "c le", - "оР¶", - "Ä ĩ", - "ľ ë", - "Ġz u", - "и м", - "ä½ ł", - "Ġsing le", - "Ġa uch", - "cus s", - "Ġget s", - "Ġsomet imes", - "å ¾", - "am b", - "m m", - "c ing", - "Ġper fect", - "ĠB l", - "out h", - "ì ł", - "Ġs ci", - "p ar", - "Ġre d", - "Ġp ost", - "Ġm ot", - "Ġele ct", - "ĠE u", - "it ive", - "ĠS ome", - "Ġdes cri", - "Ġcur rent", - "é s", - "Ġt re", - "ĠE n", - "Ġm it", - "E N", - "Ī ë", - "i um", - "Ġhe ard", - "Ġsim ple", - "l ar", - "Ġevery body", - "il ar", - "Ġneed s", - "Ġdif fic", - "ĠGo od", - "um ent", - "c ent", - "Ġo per", - "а ÑĤÑĮ", - "et y", - "Ġbl ack", - "Ġgi ven", - "on es", - "Ġwe l", - "é Ģ", - "Ġìķ Ħ", - "Ġ3 0", - "A T", - "Ġst at", - "ou ch", - "ĠM r", - "а ÑĢ", - "Ġsh o", - "Ġcon d", - "× Ķ", - "m y", - "Ġchild ren", - "Ġe u", - "еР´", - "ìķ Ħ", - "ter n", - "Ġu h", - "Ġh ar", - "Ġpr om", - "Ġp ull", - "re w", - "Ġcomp any", - "Ġbeaut iful", - "ust om", - "íķ ĺ", - "к и", - "Ġst re", - "Ġam azing", - "ri es", - "Ġsuc cess", - "Ġm ach", - "n ot", - "Ġdis cuss", - "Ġn at", - "¦ ¬", - "Ġun e", - "Ġdiffic ult", - "Ġr is", - "Î ½", - "Ġc amp", - "Ġbu y", - "ä¸ Ģ", - "Ġma g", - "p o", - "ĠY our", - "Ġbeh ind", - "ic a", - "ı n", - "ĠO K", - "Ġl ang", - "Ġwom en", - "Ġen v", - "Ġre ce", - "Ġchann el", - "i ally", - "u le", - "Ġ1 2", - "th ers", - "Ġb ott", - "Ġrep ort", - "ent ly", - "f ully", - "T he", - "Ġs ent", - "Ġev ent", - "Ġener gy", - "l t", - "Ġword s", - "ar 
r", - "d le", - "Ġa head", - "ard s", - "Ø ±", - "äº Ĩ", - "Ġto ol", - "con om", - "е Ñģ", - "Ġexact ly", - "Ġf avor", - "Ġl ow", - "Ġpro per", - "Ġìŀ Ī", - "Ġ !", - "Ġrel ations", - "Ġm as", - "Ġkid s", - "Ġent ire", - "ud e", - "Ù ħ", - "ĠWh ere", - "Ġon es", - "Ġc ity", - "ol ut", - "Ġs ix", - "ab ility", - "ö r", - "il i", - "ĠE s", - "Ġhapp ens", - "ain s", - "Ġmod el", - "Ġp ict", - "Ġes pecially", - "Ġ1 00", - "k t", - "Ġso on", - "b y", - "ro du", - "Ġan n", - "Ġsubs cri", - "ĠQ u", - "Ġav ail", - "im ent", - "Ġv oc", - "k a", - "Ġ2 00", - "ap er", - "ĠI nd", - "Ġì §", - "h or", - "į °", - "j or", - "и л", - "Ġs qu", - "A U", - "ar ning", - "ĠÐ ³", - "I S", - "ĠÐ »", - "еР¹", - "y es", - "å ħ", - "ĠÐ Ĵ", - "Ġor ig", - "оР³Ð¾", - "Ġask ed", - "il t", - "оР³", - "Ġcontin ue", - "Ġì ĺ", - "r am", - "Ġo thers", - "E S", - "oh n", - "Ġl ay", - "Ġbas ed", - "Ġp u", - "Ġapp e", - "Ġl im", - "Ġpro p", - "Ģ ë", - "m in", - "Ġh ot", - "ĠL a", - "Ġf ast", - "Ġprot ect", - "Ġam ount", - "Ġa qu", - "Ġf und", - "Ġc ustom", - "Ġc ult", - "Ġhand s", - "Ġha ven", - "Ġa ud", - "Ġout side", - "ĠA fter", - "ap s", - "Ġan im", - "pl oy", - "Ġh at", - "ĠF irst", - "Ġt reat", - "Ġe p", - "Ġm ater", - "Ġbuild ing", - "Ġë °", - "å IJ", - "ìĦ ľ", - "z a", - "ught er", - "ĠP e", - "ne y", - "et er", - "at ic", - "Ġed uc", - "ê¸ °", - "Ġmo v", - "ĵ ¤", - "am a", - "r ation", - "Ġs n", - "Ù Ī", - "Ġs um", - "Ġph ot", - "ĠÐ Ŀ", - "Ġ .", - "æľ ī", - "Ġfin ish", - "itt ing", - "å ®", - "Ġlar ge", - "Ġì ĸ", - "Ġwh ite", - "ar a", - "Ġma is", - "ĠH i", - "Ġd am", - "Ġا ÙĦ", - "Ġbo x", - "ĠHe llo", - "Ġs le", - "Ġo pt", - "ri ed", - "¥ ¼", - "Ġact iv", - "Ġn ão", - "ĠC om", - "Ġplay ing", - "T h", - "Ġavail able", - "Ġp ort", - "å Ī", - "ĠA h", - "Ġl as", - "Ġear ly", - "Ġwond er", - "± °", - "Ġ1 8", - "c ul", - "Ġfun ction", - "Ġmor ning", - "ll e", - "i ents", - "u x", - "Ġc ir", - "it ions", - "Ġde ep", - "Ġpol it", - "y or", - "m p", - "ak ing", - "Į ë", - "ĠM an", - "Ġmill ion", - "Ġ /", - "Ġind ivid", - "Ġp an", - "Ġgovern ment", - "Ġwr ite", - "ĠT od", - "am ent", - "Ġ Ï", - "Ġw ind", - "ĠE ng", - "ch en", - "W h", - "ì ľ", - "Ġ ident", - "ãģ §", - "v ent", - "ur ch", - "Ġh y", - "Ġy a", - "Ġtr ad", - "Ġrelations hip", - "à º", - "Ġd ou", - "O R", - "Ġs we", - "Ġne g", - "in ation", - "Ġte xt", - "i pp", - "Ġf ine", - "á s", - "ĠD r", - "ĠC ome", - "Ġmonth s", - ", \"", - "ен и", - "Ġhour s", - "Ġp od", - "ir t", - "Ġinv ol", - "Ġcoll ect", - "Ġau f", - "Ġp a", - "Ġhist ory", - "m b", - "if y", - "Ġ ?", - "Ġbel ow", - "as ure", - "ab y", - "Ġlang u", - "Ġan t", - "Ġcom b", - "at o", - "Ġex ist", - "Ġë ĭ", - "Ġtak es", - "Ġcharac ter", - "a ff", - "Ġf ield", - "Ġe conom", - "ie f", - "Ġpie ce", - "å ľ", - "Ġre ach", - "Ġê ²", - "on y", - "Ġmater ial", - "Ġd ig", - "Ġph ys", - "Ġimp ro", - "Ġsim ilar", - "I C", - "Ġn et", - "y n", - "Ġpos ition", - "à Ł", - "Ġb ene", - "re ad", - "Ġle arning", - "um e", - "Ġcle an", - "ÑĤо ÑĢ", - "Ġco ok", - "Ġseem s", - "Ġo l", - "ĠU S", - "ĠJ es", - "Ġ à®", - "ent ial", - "ivers ity", - "ac y", - "Ġ Ñı", - "olut ely", - "re ct", - "ĠP lease", - "Ġrep res", - "Ġt ouch", - "m en", - "ĠÐ °", - "i ón", - "ĠThank s", - "Ġan g", - "Ġma jor", - "Ġit self", - "ill s", - "\" ,", - "i ans", - "Ġsc reen", - "Ġh or", - "Ġknow n", - "Ġenv iron", - "Ġfin al", - "Ġfig ure", - "ĠT w", - "Ġe yes", - "Ġim ag", - "Ġsee ing", - "Ġha ir", - "re m", - "Ġapp lic", - "end s", - "p ut", - "Ġnew s", - "Ġcomplet ely", - "ugh s", - "Ġkn ew", - "if ied", - "ĠJ e", - "ĠD id", - "Ġsitu ation", - "Ġf lo", - 
"m s", - "Ġph one", - "Ġb all", - "d o", - "Ġp arent", - "Ġs orry", - "ur y", - "и н", - "ip s", - "аР´", - "Ġinst ead", - "Ġhu ge", - "Ġt u", - "Ġ ãģ", - "ĠG r", - "Ġdet ail", - "ĠÐ Ł", - "Ġindivid ual", - "Ġf ire", - "Ġcl os", - "Ġw er", - "un e", - "Ġrun ning", - "Ġcon vers", - "Ġrec omm", - "Ġcom o", - "Ġsome body", - "ĠJ ohn", - "ĠìĿ ´", - "ĠO ur", - "pl es", - "ĠP h", - "Ġan al", - "Ġ5 0", - "Ġof fer", - "Ġ <", - "ition al", - "g est", - "Ġv ous", - "l et", - "ic y", - "Ġfeel ing", - "L E", - "r os", - "Ġth ird", - "оРº", - "Ġser ies", - "ĠAn y", - "is ed", - "o ld", - "Ġdra w", - "Ġserv ice", - "Ġcan not", - "b al", - "ãģ Ĩ", - "Ġli ving", - "ı m", - "Ġdiffer ence", - "Ġopportun ity", - "Ġne ar", - "or th", - "k en", - "Ġloc al", - "Ø ª", - "ĠC on", - "Ġob ject", - "Ġd ass", - "ãģ Ļ", - "IJ ×", - "Ġquick ly", - "ra ph", - "Ġiss ues", - "éĢ Ļ", - "ĠAmeric an", - "Ġpre p", - "en ces", - "Ġprof ess", - "ll ing", - "o f", - "Ġfo ot", - "b re", - "Ġus ually", - "Ġgener al", - "d a", - "an ces", - "Ġd est", - "Ġo cc", - "Ġmem bers", - "Ġd ans", - "Ġequ al", - "z t", - "Ġbe com", - "Ġmo ving", - "Ġspec ific", - "ÃŃ a", - "Ġf ur", - "Ġne cess", - "Ġcomm on", - "Ġatt ack", - "ĠÑį ÑĤо", - "ĠTod ay", - "Ġun s", - "ĠG u", - "i od", - "Ġacc ount", - "Ġgra nd", - "Ġs elf", - "ĠE l", - "Ġt ast", - "Ġcont ent", - "Ġc u", - "Ħ ë", - "ĠMay be", - "ĠJes us", - "ore s", - "p ort", - "© ´", - "Ġg ives", - "Ġnorm al", - "ÑĢ Ñĥ", - "Ġimp act", - "ä r", - "Ġd ies", - "Ġl ab", - "s h", - "i os", - "ĠP res", - "ĠU nd", - "ĠO f", - "Ġfin ally", - "Ġdo ll", - "Ġvoc ê", - "p ly", - "ĠA g", - "Ġtak en", - "Ġgr ound", - "f ort", - "Ġg ave", - "ĠIn st", - "Ġl ost", - "Ġwork ed", - "Ġl iter", - "Ġiss ue", - "Ġind ust", - "Ġret urn", - "Ġhappen ing", - "Ġwant s", - "и в", - "Ġproblem s", - "ĠC ar", - "Ŀ ¼", - "ĠAl so", - "Ġs ize", - "Ġob viously", - "ĠS u", - "ĠS c", - "Ġrecomm end", - "our ces", - "ast ic", - ".. 
..", - "Ġm i", - "l ier", - "ĠE ven", - "ci a", - "Ġh ur", - "v a", - "Ġm ass", - "Ġwould n", - "un t", - "ck s", - "Ġf elt", - "os p", - "l ight", - "ол ÑĮ", - "n ie", - "Ġbott om", - "Ġб Ñĭ", - "ore d", - "is on", - "Ġgr ad", - "Ġum a", - "Ġv a", - "Ġì Ĥ", - "ress ion", - "ul ation", - "I D", - "id ence", - "Ġb ur", - "Ġg one", - "l u", - "ìĸ ´ì", - "Ġre du", - "Ġj a", - "ìĿ ĺ", - "it a", - "Ġso ft", - "Ġç a", - "ic o", - "er al", - "à ±", - "a f", - "Ġpoint s", - "g u", - "Ġd é", - "ap t", - "a x", - "ĠAl right", - "Ġcam era", - "Ġa ch", - "Ġп о", - "Ġse ver", - "5 0", - "Ġs ie", - "Ï ģ", - "Ġm al", - "Ġcomp ut", - "Ġmid dle", - "Ġcould n", - "m ing", - "Ġì ĭ", - "ĠH is", - "Ġg ames", - "Ġint rodu", - "Ġc ell", - "p or", - "Ġsle ep", - "Ġë ³", - "id ing", - "Ġ ou", - "Ġde g", - "Ġdr ink", - "Ġenviron ment", - "ĠUn ited", - "Ġtalk ed", - "Ġcho ose", - "Ġj our", - "e ge", - "ĠM in", - "Ġint e", - "Ġr ather", - "Ġoff ic", - "к а", - "ac hing", - "Ġmention ed", - "Ġf ill", - "Ġtr ack", - "Ġn ie", - "Ġ ut", - "Ġв Ñĭ", - "ib ility", - "Ġv ac", - "Ġr ad", - "Ġp ack", - "Ġs end", - "ĠD as", - "ĠA b", - "Ġeng ine", - "ãģ Ĺ", - "Ġcomp et", - "à ´", - "Ġв Ñģ", - "Ġdo or", - "Ġlong er", - "å° į", - "Ġlangu age", - "Ġext ra", - "pl ay", - "Ġwe bs", - "um b", - "ro om", - "ç ľ", - "Ġbegin ning", - "Ġre fer", - "A M", - "n en", - "ig her", - "f ace", - "er c", - "Ġfor get", - "Ġcom ment", - "еРº", - "л Ñı", - "r or", - "ż e", - "ĠG e", - "Ġd ark", - "Ġany one", - "ant e", - "g es", - "ìĬ µ", - "Ñ ij", - "b ed", - "j e", - "ruct ure", - "Ġpr im", - "id a", - "è ¦", - "ãģ ¾", - "Ġm ix", - "Ġstart ing", - "ĠìĿ ´ë", - "Ġprov ide", - "act ion", - "Ġm other", - "Ġper iod", - "Ġst ick", - "ĠYou T", - "Ġtechn ology", - "ê ¹", - "Ġb ed", - "Ġg iving", - "Ġexpl ain", - "z en", - "im ate", - "Ġrepres ent", - "lo ad", - "ĠHow ever", - "Ġli ves", - "ut h", - "ir it", - "og n", - "Ġli k", - "Ġresp ons", - "Ġpri v", - "Ġto m", - "ç ão", - "i am", - "Ġexc ited", - "Ġc ard", - "gr ound", - "Ġ× Ķ", - "Ġs ens", - "Ġte ach", - "id o", - "h od", - "Ġep is", - "Ġwel come", - "Ġw all", - "ä ¹", - "Ġch ance", - "h en", - "ĠÐ ¡", - "ĠÄ ij", - "Ġsim ply", - "ĠÑĤ ак", - "r ing", - "j a", - "b ook", - "Ġsever al", - "st e", - "Ġcreat ed", - "Ġо ÑĤ", - "Ġp ush", - "= =", - "Ġh igher", - "u f", - "our ce", - "o ke", - "Ġon line", - "Ġre le", - "Ġt on", - "ens ive", - "Ġfavor ite", - "Ñĥ д", - "Ġlook ed", - "Ġv on", - "âĢ Ķ", - "Ġf ür", - "Ġbut ton", - "Ġb ill", - "Ġchang es", - "! 
\"", - "Ġsl ow", - "ab les", - "Ġde ath", - "and s", - "ate g", - "Ġthem selves", - "ãģ £", - "Ġc op", - "ãģ ®", - "Ġperson al", - "ug hing", - "Ġ1 1", - "g ar", - "ad es", - "Ġneed ed", - "Ġstud y", - "ag ed", - "ÑģÑĤ в", - "in o", - "Ġdis c", - "k i", - "Ġadd ress", - "× ¨", - "itt en", - "es ome", - "ĠÐ ¶", - "¤ ë", - "ur a", - "Ġm u", - "Ġcontin u", - "f or", - "Ġm atch", - "ãģ ¦", - "Ġstra ight", - "IJ ë", - "n ers", - "Ġdo g", - "Ġde b", - "ĠC O", - "Ġo s", - "g ed", - "c ame", - "Ġcor rect", - "et te", - "ĠSe e", - "Ġinclud ing", - "ĠEu ro", - "est er", - "Ġj ump", - "ĠWh ich", - "Ġк ак", - "s on", - "y a", - "IN G", - "Ġe ine", - "os h", - "en cy", - "Ġmed ia", - "Ġsubscri be", - "é Ĥ", - "Ġpr in", - "Ġha b", - "ĠP er", - "ĠW as", - "Ġp age", - "it or", - "Ġto wards", - "Ġtri ed", - "en ge", - "art ment", - "Ġvar i", - "Ġp aper", - "Ġpict ure", - "Ġvers ion", - "Ġbr ought", - "w are", - "ĠSt ates", - "Ġs ich", - "led ge", - "Ġper cent", - "Ġgo d", - "e c", - "ĠC omm", - "Ġdec ided", - "Ġse lect", - "íķ ľ", - ") .", - "ur ity", - "Ġfur ther", - "Ġcom ments", - "le ment", - "Ġd ream", - "Ġcent er", - "m i", - "Ġc as", - "Ġwom an", - "Ġro ad", - "Ġf ail", - "Ġbe came", - "l us", - "il ities", - "ãģ ¯", - "ĠC o", - "Ġman age", - "Ġrec ogn", - "Ġact ion", - "Ġbene f", - "Ġear lier", - "× ľ", - "Ġspe ed", - "Ġm ent", - "Ġso ci", - "Ġsho ot", - "u i", - "Ġà ¤", - "Ġapp ly", - "v o", - "x im", - "Ġca use", - "Ġsur pr", - "Ġha ben", - "D I", - "Ġf ather", - "ĠNe xt", - "ĠYouT ube", - "Ġc ode", - "Ġro le", - "g ress", - "Ġg reen", - "et t", - "Ġbu ilt", - "Ġfl ow", - "Ġb ase", - "Ġtra ining", - "Ġr ound", - "ĠW ill", - "Ġp ath", - "ĠR o", - "Ġinterest ed", - "ìĸ ´", - "Ġres pect", - "Ġchang ed", - "iss ion", - "Ġstud ent", - "og raph", - "Ġappro ach", - "Ġshow s", - "å° ±", - "Ġt ar", - "Ġcr it", - "Ġg lo", - "ìĬµ ëĭĪëĭ¤", - "Ġde ad", - "ĠPres ident", - "Ġth ous", - "Ġb al", - "st er", - "e x", - "Ġabs olutely", - "Ġm ic", - "Ġpract ice", - "Ġqu ality", - "Ġl ower", - "og le", - "Ġse par", - "b all", - "med i", - "Ġre view", - "ĠA pp", - "Ġo k", - "âĢ ĭ", - "Ġexper ien", - "Ġconc ern", - "ent ially", - "m ore", - "ĠJ o", - "ap an", - "ĠI ch", - "ist ic", - "Ġf air", - "Ġwebs ite", - "i res", - "ĠB y", - "Ġtra vel", - "Ġris k", - "Ġm ir", - "Ġbo ard", - "Ġs en", - "Ġparent s", - "ĠW ow", - "Ġfe ed", - "Ġsa ve", - "Ġser ious", - "Ġin it", - "E L", - "und red", - "A S", - "Ġv an", - "or row", - "Ġwor th", - "Ġse arch", - "Ġ1 6", - "Ġpart s", - "ÑģÑĤ ÑĮ", - "Ġcomp an", - "Ġmov ie", - "Ġmet hod", - "Ġ ill", - "Ġw ish", - "d y", - "Ġit em", - "Ġmin us", - "ang er", - "Ġvo ice", - "Ġsk in", - "Ġare as", - "Ġe ight", - "Ġo bs", - "Ġ ,", - "аР¹", - "Ġo il", - "Ġc y", - "Ġb aby", - "s y", - "Ġem ploy", - "ĠK e", - "Ġpl aces", - "Ġf ix", - "Ġest á", - "ãģ ¨", - "iv ed", - "Ġlot s", - "Ġse ason", - "un k", - "al t", - "Ġt able", - "ĠÐ ¢", - "à ¢", - "Ġatt ention", - "ãģ ª", - "ĠH er", - "Ġa ge", - "Ġp ra", - "b ack", - "c il", - "Ġnet work", - "r it", - "Ġdo c", - "Ġare n", - "ig en", - "Ġë Ħ", - "Ø ¯", - "end er", - "Ġtot al", - "Ġpr ice", - "Ġcra zy", - "ì ļ", - "i qu", - "th ough", - "Y ou", - "Ù ĩ", - "ãĤ ĵ", - "Ï ħ", - "Ġs at", - "Ġb i", - "ĠD ie", - "Ġsh a", - "Ġthank s", - "u h", - "Ġst age", - "аР¶", - "ĠF l", - "Ġle av", - "Ġbo y", - "Ġa f", - "ö n", - "ĠG et", - "Ġac cept", - "Ġent er", - "Ġt ur", - "Ġsi ÄĻ", - "Ġhon est", - "ãĢ Į", - "Ġs am", - "Ġre pl", - "g ing", - "Ġdevelop ment", - "ĠA ct", - "or a", - "ãĢ į", - "ä ¾", - "Ġknow s", - "Ġim age", - "ĠL ord", - "и ÑĤÑĮ", - "Ġweek s", - "Ġse 
x", - "Ķ ë", - "Ġh undred", - "Ġsound s", - "Ġlearn ed", - "Ġb ud", - "ĠÑģ ÑĤ", - "Ġinc red", - "â Ļ", - "Ġn os", - "Ġd rop", - "Ġb en", - "ĠÐ ĺ", - "Ġsa fe", - "at a", - "Ġf uck", - "so ci", - "Ġd an", - "Ġcr oss", - "1 0", - "m o", - "ver t", - "Ġ1 7", - "z ie", - "å ķ", - "Ġd om", - "ĠB o", - "Ġset ting", - "Ġinvol ved", - "ar ily", - "Ġs ind", - "Ġs us", - "Ġwor ry", - "et h", - "ê¹ Į", - "Ġs un", - "Ġh ier", - "Ġcertain ly", - "ou l", - "ort s", - "ĠE r", - "ĠU m", - "Ġca us", - "Ġnat ural", - "Ġà ¼", - "Ġc ry", - "ĠSe c", - "Ġs om", - "æ ²", - "Ġeduc ation", - "а еÑĤ", - "Ġmult ip", - "Ġal one", - "Ġe ye", - "Ġr ate", - "ĠEuro pe", - "è ¿", - "m on", - "Ġf it", - "iz ing", - "pp ed", - "Ġpress ure", - "th e", - "и Ñģ", - "it es", - "ĠA f", - "re ci", - "att le", - "Ġserv ices", - "ĠGo ogle", - "é ģ", - "Ġc ases", - "Ġdri ve", - "Ġchall eng", - "u z", - "ĠM o", - "ìľ ¼ë", - "v al", - "åĢ ĭ", - "Ġf ol", - "Ġì ¢", - "ff ic", - "Ġr a", - "Ġs in", - "Ġbl ue", - "Ġaff ect", - "Ġm is", - "Ġsh ot", - "Ġо б", - "as ing", - "Ġsign ific", - "ĠC he", - "Ġê ³", - "Ġpos itive", - "ì £", - "Ġw ie", - "Ġ4 0", - "ord ing", - "ĠFr om", - "ê µ", - "Ġbra nd", - "Ġtr ust", - "Ġp le", - "Ġcommun ic", - "Ġwe ight", - "Ġask ing", - "Ġta x", - "ĠJ apan", - "ãģ Ł", - "Ġíķ ĺ", - "op s", - "Ï Ĥ", - "Ġput ting", - "Ġro ll", - "ĠAmeric a", - "re g", - "ŀ ×", - "at ures", - "ens ion", - "ĠS omet", - "Ġorig inal", - "p ing", - "Ġ ÅŁ", - "Ġproduct s", - "ãĥ ¼", - "Ġcont act", - "ol ution", - "Ġgo al", - "Ġp ow", - "Ġperform ance", - "Ġblo od", - "at ors", - "ĠM ich", - "Ġtem per", - "ĠD an", - "Ġsu gg", - "ÑĤ и", - "Ġim m", - "Ġoff ice", - "Ġar ri", - "Ġcom fort", - "ĠÐ Ķ", - "Ġsugg est", - "Ġpl at", - "Ĥ ĺ", - "1 9", - "Ġo m", - "Ġse ven", - "ĠC ent", - "ill e", - "Ġcon cept", - "Ġb ag", - "ü n", - "ive ly", - "Ġd iv", - "m os", - "æ ī", - "Ġfeel s", - "Ġ ir", - "ak es", - "le y", - "Ġpartic ip", - "ĠÐ ļ", - "f l", - "j ust", - "Ġs il", - "ĠP a", - "A L", - "Ġgot ta", - "Ġf an", - "Ġchall enge", - "Ġcompan ies", - "ĠPe ople", - "< /", - "оР·", - "Ġp en", - "is ing", - "Ġa us", - "em ic", - "am ente", - "Ġmeet ing", - "Ġvis it", - "Ġsupp osed", - "ĠOn ce", - "д а", - "or ld", - "3 0", - "U S", - "Ġvi ol", - "Ġnot ice", - "ĠÐ IJ", - "h an", - "p ed", - "ì ĺ", - "h h", - "Ġtr ou", - "Ġmin ute", - "ĠP ar", - "r ay", - "Ġt it", - "Ġup d", - "Ġblo ck", - "Ġd ue", - "a ur", - "Ġfor ce", - "Ġcou n", - "ĠâĢ Ķ", - "Ġtyp es", - "ë §", - "Ġl ate", - "Ġimpro ve", - "Ġì Ī", - "Ġa ve", - "ul es", - "c l", - "am ed", - "Ġaw esome", - "ĠO k", - "Ġv ot", - "Ġmach ine", - "Ġfollow ing", - "Ġme asure", - "ac ión", - "u el", - "ch an", - "Ġab ility", - "Ġt out", - "Ġide as", - "Ġincre ase", - "Ġen s", - "ĠÑ ħ", - "Ġë ª", - "Ġj est", - "ĠÐ ľ", - "Ġtr uth", - "h y", - "Ġsp end", - "Ġsci ence", - "et e", - "Ġ1 4", - "Ġepis ode", - "Ġal g", - "end ed", - "ãģ ĵ", - "ar i", - "ll a", - "Ġf ish", - "Ġthr ow", - "m it", - "å ¹", - "Ġcir c", - "ĠC al", - "Ġt our", - "Ġdire ction", - "Ġno ch", - "еР²", - "é n", - "Ġcount ries", - "Ġindust ry", - "in y", - "ic le", - "Ġfe et", - "I t", - "Ġlead ers", - "et zt", - "Ġst aff", - "ç Ķ", - "Ġpur p", - "it o", - "? 
!", - "ĠJ a", - "Ġst ore", - "et ic", - "ĠCh ina", - "Ġë IJ", - "ĠUn iversity", - "Ġ #", - "Ġdec ision", - "Ġach ie", - "Ġact ual", - "u ly", - "Ġse ction", - "Ġresult s", - "Ġst ar", - "Ġm ist", - "ib ly", - "Ġd ad", - "Ġnum bers", - "om b", - "è ª", - "ĠS pe", - "Ġm er", - "Ġ2 5", - "Ġaut om", - "Ġco ld", - "Ø ¨", - "Ħ ľ", - "ag er", - "ĠT V", - "ĠS ie", - "ĠH ave", - "Ġ że", - "ug g", - "ain ed", - "Ġup on", - "Ġlo g", - "Ġcomplet e", - "Ġbra in", - "ag ing", - "ĠM us", - "o ver", - "Ġeas ier", - "Ġinte gr", - "Ġm ás", - "Ġturn ed", - "Ġst ri", - "iv al", - "Ġhe av", - "ĠT H", - "Ġwr iting", - "ÑĢ а", - "åľ ¨", - "å¤ §", - "Ġcl a", - "d ing", - "Ġtell ing", - "и д", - "ic ated", - "ä» ¥", - "ac ht", - "ãģ Ĥ", - "h aps", - "ĠSt e", - "Ġres ources", - "Ġd ann", - "Ġpart y", - "Ġ ÏĦ", - "Ġsa f", - "is es", - "t re", - "o int", - "Ġknow ledge", - "Ġany more", - "Ġf ly", - "Ġma int", - "и к", - "å ij", - "Ġse ll", - "la ughs", - "ĠY ork", - "Ġb ien", - "Ġo d", - "Ġeas ily", - "Ġr ange", - "Ġo ption", - "Ø ¹", - "Ġapp reci", - "oc r", - "Ġdet erm", - "Ñ Ħ", - "Ġmean ing", - "Ġs ite", - "Ġdis co", - "ver age", - "Ġl ose", - "Ġinst all", - "Ġem ot", - "ant ly", - "ä t", - "Ġt amb", - "ĠW ar", - "ĠH o", - "ĠG en", - "em y", - "еР·", - "ĠP ol", - "Ġmess age", - "Ġnot e", - "Į Ģ", - "Ġh et", - "Ġim medi", - "Ġav o", - "Ġbook s", - "Ġbecom es", - "res h", - "è s", - "as ons", - "Ġhim self", - "ut s", - "Ġj u", - "Ġaw are", - "Ġrequ ire", - "Ġsystem s", - "ĠH ar", - "Ġam ong", - "Ġh om", - "Ġb reat", - "Ġwe ird", - "Ġë ¶", - "Î »", - "Ø ©", - "if f", - "or ing", - "Ġplat form", - "ĠT ake", - "Ġhelp s", - "ut ions", - "Ġfor g", - "Ġl uck", - "ĠEng lish", - "Ġwe b", - "Ġneg ative", - "Ġt ut", - "Ġab ove", - "ng th", - "Ġê ±°", - "Ġst ories", - "Ġlo ad", - "Ġback ground", - "Ġsw itch", - "g a", - "Ġprin ci", - "Ġfin an", - "Ġvar ious", - "Ġl Ãł", - "Ġkind s", - "ain ing", - "Ġn ature", - "ĠÐ ŀ", - "c z", - "Ġpr ay", - "Ġg ar", - "ir m", - "Ġ &", - "Ġì ĥ", - "n s", - "ĠR ep", - "ĠF e", - "Ġre v", - "ra nd", - "Ġlike ly", - "Ġunderstand ing", - "ı r", - "ãģ ĭ", - "Ġf al", - "Ġ1 3", - "ÑĨ и", - "Ġsu d", - "Ġbr other", - "Ġpl ant", - "Ġthrough out", - "w ise", - "p re", - "Ġcult ure", - "ĠÙ ħ", - "Ġwonder ful", - "Ġa h", - "pp er", - "Ġso ld", - "Ġstart s", - "Ġwr itten", - "Î ¯", - "n i", - "Ġ×Ķ ×", - "ĠD av", - "Ġu lt", - "Ġar m", - "Ġro ck", - "Ġwe ar", - "ë į°", - "an o", - "ra g", - "Ġsqu are", - "ан и", - "c ast", - "le br", - "Ġliter ally", - "Ġplay ed", - "Ġhe at", - "on se", - "r ict", - "Ġins p", - "id s", - "Ġpop ular", - "ë ıĦ", - "Ġc atch", - "Ġm ount", - "Ġj ud", - "Wh at", - "еР±", - "R A", - "a ud", - "к о", - "Ġsur face", - "Ġcon v", - "Ġpie ces", - "O h", - "æ Ģ", - "Ġst yle", - "pp ing", - "Ġread ing", - "Ġconvers ation", - "оР¿", - "ä¾ Ĩ", - "ĠAg ain", - "Ġb ank", - "t ime", - "Ñĥ ÑĤ", - "er ve", - "ĠG reat", - "Ġcap t", - "аР±", - "ay s", - "ĠF in", - "ific ation", - "Ġä r", - "а Ñİ", - "Ġe gg", - "ĠW el", - "Ġtar get", - "ul a", - "ch es", - "an i", - "O O", - "ic ious", - "n ow", - "Ï ĥ", - "bo ard", - "Ġg ente", - "Ġd ro", - "ĠE t", - "Ġd in", - "Ġc os", - "Ġaut hor", - "Ø ³", - "Ġo ch", - "Ġem ail", - "Ġsp irit", - "Ġs itting", - "m as", - "Ġstre ngth", - "Ġbig ger", - "ĠW ait", - "Ġm at", - "Ġpol ice", - "ress ed", - "Ġwait ing", - "is hing", - "Ġdoll ars", - "ho od", - "s s", - "Ġimag ine", - "in i", - "Ġm es", - "Ġdis e", - "id ge", - "ab or", - "Ġp et", - "Ġh op", - "ĠK ing", - "Ġcomput er", - "Ġgo ld", - "Ġn u", - "Ġf ing", - ") ,", - "Ġsec urity", - "ru ction", - "Ġsol 
ution", - "e xt", - "Ġp atter", - "ick en", - "ure d", - "Ġstand ard", - "ìĭ ľ", - "Ġdou ble", - "Î ·", - "Ġw ife", - "is a", - "Ġdirect ly", - "ac ed", - "Ġb unch", - "Ġ ¿", - "ал ÑĮ", - "Ġreg ard", - "Ġswe et", - "Ġun ique", - "ĠâĻ «", - "Ġtra in", - "ĠG erm", - "Î ¬", - "R E", - "Ġbeh av", - "Ġpre d", - "ì ĥ", - "s et", - "Ġdescri ption", - "é e", - "Ġc at", - "å ĵ", - "Ġcoll ege", - "ì Ľ", - "Ġapplic ation", - "ĠS en", - "as k", - "Ġc red", - "ub lic", - "Ġmultip le", - "Ġn i", - "Ġpres ident", - "Ġadd ed", - "Ġro b", - "Ġaqu i", - "Ġh osp", - "Ġtool s", - "Ġg un", - "Ġbas ic", - "Ġl ines", - "Ġst ructure", - "ĠR uss", - "Ġtot ally", - "Ġbig gest", - "Ġe en", - "Ġar g", - "Ġ× ľ", - "Ġp ark", - "ĠD es", - "Ġce lebr", - "Ġf ait", - "ен ÑĮ", - "Ġsu ff", - "Ġreg ular", - "¨ ë", - "Ġm ine", - "ĠK ore", - "Ġpre vious", - "Ġp i", - "Ġse g", - "Ġpol icy", - "Ġк о", - "ĠTr ump", - "Ġvac c", - "ó w", - "ĠS y", - "и Ñĩ", - "it ter", - "Ġpolit ical", - "r as", - "Ġal s", - "ел ÑĮ", - "Ġsha pe", - "an z", - "Ġon to", - "Ġar ch", - "Ġam b", - "ag ram", - "ĠS m", - "ct ions", - "Ġjo in", - "b or", - "å Ľ", - "Ġfr ame", - "ł ĩ", - "Ġcho ice", - "௠ģ", - "Ñĥ Ñİ", - "ĠC or", - "ĠS w", - "I T", - "Ġt end", - "ĠE ar", - "Ġto r", - "Ġev ents", - "Ġcla im", - "ĠD a", - "ĠM ark", - "Ġgroup s", - "Ġe ating", - "ĠW orld", - "Ġrec ently", - "Ġtast e", - "Ġsur v", - "à ¤", - "Ġsk ills", - "Ġи з", - "itt ed", - "Ġsh op", - "ìĿ ´ì", - "Ġest ab", - "ĠëĤ ĺ", - "Ġsecond s", - "ĠTh ose", - "ĠE nt", - "Ġì Ħ", - "ers on", - "Ġto wn", - "Ġc and", - "Ġopt ions", - "Ġ ing", - "V ID", - "Ġenc our", - "Ġr é", - "âĻ ª", - "Ġent re", - "Ġmove ment", - "ĠB en", - "Ġbir th", - "Ġwh e", - "Ġh ang", - "ĠE m", - "ig e", - "ro ll", - "Ġun f", - "ì Ĥ", - "Ġr id", - "Ġsp read", - "Ġh ost", - "al d", - "ĠE d", - "Ġcons um", - "U N", - "Ġop in", - "it ar", - "ĠM ed", - "Ġsub ject", - "Ġp al", - "Ġcar ry", - "Ġag ree", - "ĠWh ile", - "Ġcare er", - "Ġsci ent", - "Ġsud den", - "Ġf ile", - "z i", - "Ġex cept", - "é º", - "Ġpot ential", - "ĠAn other", - "Ġcomp lex", - "ĠS im", - "end o", - "Ġr ais", - "Ġphys ical", - "Ġd ate", - "ak er", - "ĠC ol", - "Ġpower ful", - "Ġmem ber", - "ra p", - "Ġsp ot", - "Ġs ource", - "Ġf em", - "é m", - "Ġem p", - "j i", - "iet y", - "Ġinf lu", - "Ġd ry", - "Ġlo ck", - "Ġz ero", - "ĠU h", - "Ġr out", - "Ġpor que", - "Ġ2 4", - "Ġt al", - "Ġfol ks", - "Ġla unch", - "Ġcomp on", - "ĠWel come", - "Ġk ann", - "ä n", - "ĠÑį ÑĤ", - "e es", - "ĠÙ Ī", - "Ġany way", - "Ġaud ience", - "äº º", - "Ġsl ight", - "on a", - "Ġu r", - "Ġrel ig", - "Ġext rem", - "ı z", - "ĠM a", - "Î ¼", - "Ġà ¶", - "Ġall ows", - "Ġf at", - "ĠF ace", - "Ġn ational", - "Ġinter view", - "ĠM c", - "é t", - "Ġc ute", - "el a", - "Ġsec ret", - "ĠW est", - "ĠD ep", - "Ġex erc", - "Ġhist or", - "Ġpri or", - "Ġ6 0", - "av a", - "ac her", - "y ond", - "ĠH a", - "Ġest e", - "in ary", - "ĠN orth", - "on st", - "Ġsm art", - "am s", - "ал и", - "Ġd ar", - "er ed", - "Ġfun ny", - "ĠO b", - "ĠBl ack", - "Ġrel ated", - "ĠB u", - "Ġsome where", - "ĠR em", - "n es", - "ment e", - "ĠRe ally", - "Ġcreat ing", - "Ġfam il", - "Ġsoci ety", - "Ġg el", - "Ġtrans form", - "Ä ĥ", - "Ġinclud e", - "Ġh ol", - "l ike", - "k o", - "air s", - "Ġп од", - "Ġpers pect", - "Ġb es", - "Ġparticular ly", - "Ġshow ing", - "ĠP art", - "Ġqu al", - "lo ck", - "Ġreal ity", - "ho ld", - "ict ion", - "o on", - "Ġv ir", - "ãģ «", - "it ary", - "Ġdr ug", - "Ġfe ature", - "Ġre asons", - "Ġ× ©", - "Ġwr ote", - "Ġf ant", - "Ġb and", - "Ù ĥ", - "en a", - "ke y", - "Ġear th", - "d om", - "Ġfe 
atures", - "Ġflo or", - "Ġspeak ing", - "Ġt ip", - "ĠA ust", - "Ġst ock", - "Ġch urch", - "Ġr ac", - "ìľ¼ë ¡ľ", - "ภĻ", - "ãĤ Į", - "k y", - "Ġresp onse", - "Û Į", - "ul ations", - "Ġsl ide", - "Ġgrad u", - "ci ous", - "Ġme ant", - "Ġ ==", - "Ġ× IJ×", - "ã ħ", - "Ġkind a", - "Ġsc ene", - "Ġm uit", - "Ġê° Ģ", - "r ast", - "re st", - "Ġplay ers", - "w a", - "Ġbro ad", - "Ġtom orrow", - "oc ol", - "ĠÑģ в", - "ĠB ar", - "ı k", - "Ġse a", - "Ġrem ove", - "Ġrem ind", - "ом Ñĥ", - "ĠS ince", - "Ġave c", - "ce ll", - "и Ñħ", - "Ġdoc ument", - "Ġê·¸ë Ł", - "Ġne igh", - "be at", - "Ġp Ã¥", - "Ġas pect", - "Ġd ed", - "lish ed", - "il s", - "Ġour selves", - "u ce", - "Ġhe y", - "ĠпÑĢ о", - "ent y", - "Ġas soci", - "ad os", - "um ber", - "Ġ ]", - "éĤ £", - "no v", - "Ġì Ļ", - "Ñĥ Ñĩ", - "Ġcond ition", - "ëĬĶ ëį°", - "Ġval ues", - "Ġsc en", - "min ist", - "Ġc ast", - "Ġgrow ing", - "Ġus er", - "Ġresp ond", - "l im", - "é r", - "y m", - "çľ ĭ", - "os es", - "sy ch", - "ĠÑĢ аз", - "Ġappe ar", - "Ġpro gress", - "eng th", - "Ġj ak", - "ĠD is", - "Ġpat ients", - "ĠS er", - "Ġg as", - "è re", - "ìĸ´ì ļĶ", - "Ġre ci", - "ìĿ ¸", - "Ġs ca", - "ep end", - "Ñģ к", - "аР¿", - "Ġb atter", - "Ġve h", - "ð Ł", - "Ġac com", - "Ġbe at", - "Ġpain t", - "Ġcont rib", - "Ġs ad", - "Æ °", - "al es", - "Ġt ree", - "b a", - "Ġb orn", - "ic ed", - "à® ķ", - "b and", - "Ġme chan", - "ĠD et", - "Ġcap ital", - "Ġdel iver", - "Ġfe ar", - "ŀ ĺ", - "ĠS outh", - "Ġb ought", - "Ġst ress", - "Ġv or", - "? ?", - "i h", - "ìķ ¼", - "Ġer a", - "ìĿ´ ë", - "а Ñı", - "is ions", - "iv ity", - "Ġhelp ed", - "Ġass ist", - "Ġplay er", - "r an", - "Ġimmedi ately", - "Ġmo ved", - "c ie", - "ê ±", - "Ġann oun", - "å ¿", - "ìŀ IJ", - "Ġprodu ction", - "Ġsum mer", - "Ġt un", - "Ġprogram s", - "G H", - "al ing", - "ir a", - "el ess", - ". 
)", - "Ġa verage", - "è¦ ģ", - "Ġgl ass", - "om an", - "if ically", - "Ġëĭ ¤", - "ĠC ong", - "ĠV er", - "Ġtr ick", - "Ġbe gan", - "Ġv ill", - "ê ±°", - "h ow", - "æ Ń", - "Ġt ill", - "Ġ9 0", - "ber t", - "Ġê ¸", - "Ġtemper ature", - "à ²", - "๠Ī", - "Ġgra ph", - "Ġê· ¸", - "Ġr ot", - "Ġmo b", - "A Y", - "a el", - "Ġre pe", - "Ġdev ice", - "Ġ19 9", - "Ġte le", - "Ġke pt", - "p a", - "æ ĸ", - "ver se", - "Ġst ream", - "е Ñĩ", - "ess ion", - "Ġstr ugg", - "z z", - "Ġdeg ree", - "Ġhelp ing", - "Ġsm ell", - "Ġper haps", - "p ro", - "Ġcont ext", - "Ġi k", - "Ġп еÑĢ", - "Ġcal cul", - "éº ¼", - "b ing", - "Ġreal ize", - "l am", - "ĠCh ar", - "y t", - "ĠìĿ ´ì", - "Ġd anger", - "ĠI m", - "a a", - "Ġlo ved", - "Ġpurp ose", - "Ġfinish ed", - "Ġpe ace", - "Ġo t", - "Ġglo bal", - "Ï Ģ", - "Ġab er", - "ĸ Ī", - "Ġcharac ters", - "Ġn ur", - "Ġdam age", - "Ġem er", - "Ġpre c", - "ĠW ir", - "Ġinst it", - "ij ×", - "Ġallow ed", - "b on", - "Ġto d", - "еР³Ð¾", - "Ġj etzt", - "Ġmed ic", - "Ġsmall er", - "ce ed", - "Ġlevel s", - "Ġint ell", - "W e", - "Ġse m", - "Ġcurrent ly", - "Ġmod ern", - "Ġcont ract", - "Ġdetail s", - "ortun ately", - "O S", - "Ġst ates", - "Ġad just", - "ant age", - "e z", - "ĠV ery", - "Ġsc ale", - "Ġre lease", - "Ġf az", - "Ġ ic", - "it ude", - "A C", - "ĠP at", - "id en", - "Ń IJ", - "Ġpre fer", - "olog ical", - "ĠFace book", - "Ġê° Ļ", - "Ġ ..", - "ĠM ake", - "Ġко ÑĤоÑĢ", - "ĠDav id", - "ĠAf ric", - "Ġmod e", - "ĠC ity", - "Ġsh all", - "ĠÑ Ħ", - "im in", - "Ġз а", - "r om", - "u a", - "Ġbe yond", - "Ġdist rib", - "к Ñĥ", - "ĠDo es", - "Ġv ict", - "r ate", - "Ġv ai", - "Ġsuccess ful", - "Ġh ous", - "ah a", - "est s", - "ĠE st", - "Ġdisco ver", - "Ġthere fore", - "ch a", - "Ġc up", - "Ġpop ulation", - "ĠI l", - "s c", - "Ġsp ent", - "re l", - "Ġuse ful", - "Ġt ab", - "æ Ŀ", - "Ġ Å", - "Ġìł ľ", - "Ġcon se", - "Ġqu ant", - "ay a", - "Ġb on", - "åı ¯", - "ĠCh in", - "Ġê² ĥ", - "ound s", - "е ÑĪ", - "ell e", - "Ġ ice", - "2 1", - "Ġk ick", - "ä¸ ĭ", - "Ġstep s", - "Ġton ight", - "нÑĭ й", - "ren ch", - ". 
'", - "Ġgra b", - "Ġimp lement", - "ĠìĪ ĺ", - "Ġmiss ion", - "Ġclear ly", - "Ġappreci ate", - "è Ģ", - "Ġf resh", - "ar m", - "ĠTw o", - "Ġex ec", - "Ġproject s", - "Ġcommun ities", - "ri ble", - "Ġreg ion", - "Ġfre qu", - "ro y", - "Ġhow ever", - "Ġpart ners", - "an c", - "Ġmin im", - "Ġl at", - "Ġfamil ies", - "Ġev idence", - "Ġp un", - "ra ft", - "Ġl oss", - "Ġma p", - "Ġany body", - "Ġchang ing", - "Ġr ules", - "Ġorgan ization", - "Ġess entially", - "ĠR ed", - "Ġele ment", - "æ Ĺ", - "Ġv irt", - "r at", - "Ġpr int", - "and er", - "are n", - "em os", - "ο Ïħ", - "Ġcond itions", - "ab e", - "Ġd ance", - "и ÑĢ", - "Ġd os", - "о Ñĩ", - "ĠQ ue", - "Ġwalk ing", - "Ġt ro", - "Ġ id", - "Ġadd itional", - "Ġfull y", - "Ġf ans", - "Ġadd ition", - "Ġlik ed", - "Ġü ber", - "Ġb ow", - "d i", - "Ġm aster", - "o ff", - ") :", - "m ber", - "Ġë ¬", - "å ¯", - "åĪ °", - "la use", - "Ġo der", - "Ġsaf ety", - "Ġre act", - "à® ¿", - "b t", - "Ġdis app", - "Ġgirl s", - "S t", - "ĠA ng", - "Ġfa ith", - "Ġturn s", - "Ġt ight", - "Ġm outh", - "am i", - "z er", - "Ġwe ap", - "Ġб Ñĥд", - "Ġhosp ital", - "ra id", - "Ġmic ro", - "ĠSt ate", - "ĠM ost", - "ag n", - "Ġdec ide", - "Ġpat ient", - "Ġcor ner", - "Ġdi ed", - "N o", - "ĠSt ud", - "re nd", - "em pt", - "Ġli e", - "Ġl if", - "ĠBe fore", - "t ó", - "ĠSu per", - "Ġbe ll", - "6 0", - "Ġpriv ate", - "ĠPa ul", - "Ġg ib", - "Ġag re", - "´ì Ħľ", - "Ġs ig", - "Ġinvest ig", - "Ñı ÑĤ", - "en ing", - "Ġdist ance", - "Ġwar m", - "Ġdig ital", - "å¾ Ī", - "in er", - "Ġp and", - "ĠCO VID", - "Ð ³Ð¾", - "g n", - "Ġr ace", - "Ġpr oud", - "Ġte aching", - "Ġ ÑĤо", - "ìŀ ¥", - "ĠAll ah", - "I n", - "Ġw ood", - "Ġcol ors", - "Ġw ird", - "u j", - "id ad", - "Ġcustom ers", - "Ġconnect ed", - "Ġlay er", - "Ġachie ve", - "Ġperspect ive", - "ĠC oll", - "Ù Ĥ", - "Ġcl oud", - "!! 
!", - "Ġend ed", - "łĩ ê²Į", - "Ġmanage ment", - "Ġr ich", - "Ġsub st", - "Ġrem o", - "Ġser ve", - "Ġres ist", - "Ġthought s", - "Ġgrow th", - "ili ar", - "Ġright s", - "Ġchar ge", - "Ġcons ist", - "Ġwer den", - "Ġem b", - "and om", - "Ġhur t", - "Ġk an", - "i as", - "л о", - "Ġsh it", - "Ġbe g", - "Ġrece ived", - "it ation", - "Ġme at", - "Ġis so", - "ff ee", - "Ġfam ous", - "Ġcomfort able", - "I L", - "ĠB ye", - "èª ª", - "åĢ ij", - "oth es", - "Ġmed ical", - "Ġenjoy ed", - "Ġhealth y", - "Ġw y", - "c ies", - "Ġeff ort", - "Ġdo ctor", - "Ġmil itary", - "L AU", - "Ġg ro", - "Ġb attle", - "Ġf ed", - "Ġcap ac", - "Ġaf raid", - "iv il", - "ĠвÑģ е", - "Ġl ength", - "ys is", - "Ġbe i", - "¤ í", - "Ġorgan iz", - "or g", - "in c", - "Ġinter act", - "ĠChin ese", - "Ġacc ording", - "Ġincred ible", - "Ġkill ed", - "Ġda ughter", - "ĠÏ Ģ", - "Ñĭ в", - "Ġschool s", - "Ġ «", - "ll er", - "Ġshould n", - "n al", - "Ġcr is", - "Ġch icken", - "Ġf aster", - "Ġextrem ely", - "Ġopp os", - "Ġn ous", - "Ġ +", - "ri a", - "Ġfinan cial", - "Ġexc iting", - "Ġjour ney", - "×Ļ× Ŀ", - "ł ë", - "Ġdis play", - "Ġmem ory", - "Ġheav y", - "н е", - "Ġpass ed", - "ÑĢ и", - "il es", - "Ġp sych", - "Ġspec ifically", - "Ġeng age", - "Ġl ed", - "or ge", - "ĠD em", - "ord er", - "Ġ8 0", - "Ġcre am", - "ester day", - "Ġed ge", - "Ġп ол", - "Ġbu ll", - "Ġind ic", - "Ġk tó", - "Ġhope fully", - "um ents", - "ag en", - "н ого", - "Ġh ate", - "ch t", - "8 0", - "Ġeff ic", - "Ġì§ Ģ", - "Ġintern et", - "Ġbud get", - "Ġproper ty", - "id ay", - "Ġì ļ", - "Ġм ож", - "ol a", - "Ġshow ed", - "ĠM on", - "Ġthous and", - "A P", - "Ġpo or", - "us ed", - "ĠJ ack", - "Ġs Ã¥", - "ĥ ½", - "Ġes c", - "Ġsoft ware", - "Ġqu ar", - "ĠØ ¨", - "Ġnecess arily", - "om en", - "i y", - "Ġevent ually", - "ish ed", - "Ġbr ight", - "E D", - "Ġs pl", - "Ġdem and", - "Ġth reat", - "Ġs ir", - "Ġrele ased", - "ck et", - "ĠâĢ «", - "Ġrequ ired", - "Ġv ote", - "ì ¹", - "à® ¤", - "Ġdevelop ed", - "ĠìĤ ¬", - "at ory", - "Ġd ir", - "ca pe", - "Ġslight ly", - "à ¬", - "๠ī", - "re et", - "Ġdise ase", - "Ġcour t", - "Ġitem s", - "ĠEar th", - "ÑģÑĤ и", - "ж е", - "ì ²", - "Ġchalleng es", - "ĠBr it", - "Ġdesign ed", - "1 2", - "Ġhear ing", - "Ġlisten ing", - "z o", - "ĠÑģ л", - "ãģ§ ãģĻ", - "Ġper o", - "Ġwe aring", - "pl ic", - "Ġch em", - "Ġbal ance", - "Ġb a", - "Ġrece ive", - "im a", - "Ġsignific ant", - "Ġм Ñĭ", - "an ch", - "ĠC r", - "ĠC oun", - "ê¸ Ī", - "Ġjo bs", - "Ġoffic ial", - "Ġper m", - "om s", - "Ġopportun ities", - "Ġover all", - "Ġh us", - "od es", - "Ġn ation", - "ĠR eg", - "Ġor d", - "Ġrest aur", - "Ġì Ĩ", - "Ġm el", - "v in", - "Ġw enn", - "Ġk ön", - "æ ĥ", - "Ġopin ion", - "ãĤ Ĥ", - "è ¬", - "ĠSomet imes", - "ç Ĥ", - "Ñī е", - "as c", - "O U", - "Ġ20 20", - "Ġdel icious", - "ig er", - "Ġìķ Ī", - "o le", - "Ġhand le", - "Ġc it", - "Ġíķ ľ", - "Ġf ör", - "o oth", - "Ġnecess ary", - "Ġind epend", - "æ Ħ", - "ist en", - "h am", - "Ġé t", - "ãĥ ³", - "Ġmult i", - "Ï Į", - "? 
)", - "Ġcamp us", - "Ġtop ic", - "Ġr ain", - "Ġpan el", - "ĠS am", - "Ġlar ger", - "aud ience", - "Ġpa id", - "Ġeconom ic", - "ol t", - "Ġstre et", - "ĠC ont", - "Ġdri ving", - "Ġìł Ģ", - "Ġh ay", - "Ġprofess ional", - "ĠIn tern", - "å ¸", - "Ġin put", - "Ġc ateg", - "Ġc ro", - "Ġ ll", - "E T", - "Ñĭ й", - "* *", - "ĠZ e", - "B LE", - "Ġì ¤", - "re es", - "ĠÐ ¯", - "ed e", - "ier t", - "Ġfo ld", - "Ġd ur", - "ĠN ational", - "Ġìĸ ´ë", - "an ced", - "Ġfa ire", - "ut ed", - "Ġk ing", - "Ġw ild", - "o i", - "up beat", - "Ġpre vent", - "i us", - "Ġà ¨", - "Ġw ide", - "Ġr ing", - "Ġtit le", - "Ġstand ing", - "Ġal though", - "Ġh i", - "Ġsa uce", - "Ġs ides", - "Ġanim als", - "il ing", - "at ives", - "ìĹIJ ìĦľ", - "ĠO ver", - "Ġdes p", - "Ġconsider ed", - "ar ies", - "i ers", - "Ġein en", - "Ġs ister", - "Ġë ķ", - "ĠS ure", - "ãĤ ĭ", - "ri end", - "a ign", - "Ġsh own", - "Ġs ac", - "Ġs ont", - "Ġcent ury", - "Ġt ien", - "ĠÎ º", - "ĠS T", - "åķ Ĭ", - "Ġold er", - "ie m", - "Ġtr uly", - "ĠS i", - "Ġwind ow", - "iqu es", - "ar io", - "æ² Ĵ", - "Ġloc ation", - "Î º", - "Ġì ľ", - "v i", - "ag ue", - "ĠS orry", - "Ġdis p", - "Ġhe ll", - "Ġà ī", - "Ġtr ade", - "Ġcrit ical", - "Ġê ±", - "Ġn amed", - "Ġprep ared", - "ĠH ouse", - "al u", - "Ġt ough", - "Ġtri p", - "Ġs and", - "c el", - "ü z", - "ĠP ut", - "Ġap art", - "is f", - "v is", - "Ġli br", - "a ven", - "Ġv ie", - "Ġeffect ive", - "ภ²", - "Ġmag n", - "Ġmuit o", - "Ġê µ", - "h al", - "Ġlim it", - "Ġn ine", - "Ġwill ing", - "ı ÅŁ", - "s p", - "еР³", - "h i", - "Ġal t", - "ĠJ an", - "Ġorig in", - "ĠU s", - "Ġele ments", - "Ġus es", - "Ġhelp ful", - "Ġfl at", - "Ġfam iliar", - "ĠP ark", - "Ġc ore", - "Ġclos er", - "Ġact ive", - "Ġad minist", - "C E", - "нÑĭ е", - "ç Ħ", - "Ġrel ative", - "Ġment al", - "Ġr andom", - "Ġpart ner", - "Ġut il", - "ph one", - "Ġr ule", - "w w", - "Ġìł ķ", - "Ġsch on", - "Ġco ffee", - "H A", - "Ġconnect ion", - "Ġun it", - "la ughing", - "l og", - "Ġapp l", - "л а", - "us ic", - "ĠB ra", - "Ġany where", - "AU DI", - "Ġsepar ate", - "bo x", - "Ġd ivid", - "Ġtest ing", - "Ġs ick", - "Ġwer en", - "ä» ĸ", - "Ġ׾ ×", - "Ġadv antage", - "Ġtrans fer", - "' .", - "Ġë ¹", - "Ġfind ing", - "н ой", - "Ġì¢ ĭ", - "Ġfor t", - "Ġeconom y", - "Ġl ack", - "Ġleav ing", - "Ġd im", - "å İ", - "ĠR es", - "Ø Ń", - "Ġdiscuss ion", - "еР¿", - "Ġg es", - "du ct", - "Ġch ain", - "Ġus ers", - "e ch", - "ÅĤ a", - "Ġdis h", - "Ġcare ful", - "Ġte acher", - "Ġopt im", - "Ġfl u", - "at ically", - "Ġref lect", - "Ġtreat ment", - "e ed", - "i ÄĻ", - "à ¹", - "à® ¾", - "Ġequ ip", - "Ġplan ning", - "Ġsol ve", - "ãģ Ŀ", - "ĠT om", - "Ġavo id", - "Ġp ou", - "Ġgreat er", - "l in", - "O L", - "ĠL u", - "ĠM ore", - "Ġatt ract", - "ê n", - "un a", - "Ġphot o", - "er ation", - "Ġplan et", - "Ġcop y", - "Ġvis ual", - "ir ing", - "Ġintern ational", - "Ġla ughing", - "Ġth ick", - "Ġhold ing", - "Ġbring ing", - "Ġlet ter", - "Ġb urn", - "Ġeffect s", - "it é", - "our s", - "O T", - "ê me", - "ĠSch ool", - "×ķ× ª", - "rop ri", - "l ig", - "α ι", - "Ġad ult", - "Ġsu gar", - "Ġr ide", - "Ġhigh light", - "Ġno body", - "Ġ2 1", - "Ġch at", - "ĠпÑĢ и", - "Ġin nov", - "ung en", - "Ġatt ach", - "ed om", - "å Ĭ", - "y l", - "Ġleg al", - "Ġr ice", - "Ġcoll abor", - "k ing", - "d own", - "æ Ļ", - "ãĤ Ĭ", - "Ġi h", - "ĠA c", - "ous ly", - "Ġr ap", - "Ġsol id", - "Ġgener ally", - "Ġpatter n", - "al i", - "ภŃ", - "Ġtrans l", - "in ter", - "a ult", - "Ġë ¨", - "Ġexp ress", - "Ġexam ples", - "Ġch ose", - "Ġtell s", - "ÃŃ s", - "ain t", - "ĠT ell", - "ĠMich ael", - "æ ¨", - "ĠN umber", - "Ġt 
ap", - "Ġexper iment", - "Ġbenef it", - "Ġì °", - "Ġse qu", - "Ġexp ensive", - "Ġgener ation", - "ĠM any", - "Ġadd ing", - "Ġk il", - "Ġcamp aign", - "ĠA nt", - "ra w", - "omm en", - "Ġs oul", - "j o", - "ĠAct ually", - "am m", - "ê² ł", - "Ġma xim", - "Ġsal t", - "Ġc ru", - "Ġcall ing", - "ãģ Į", - "Ġbas is", - "b an", - "Ġkeep ing", - "ĠM or", - "ed s", - "ì Ĩ", - "Ġto do", - "ам и", - "н Ñı", - "Ġli ved", - "ĠD u", - "ãĤ ī", - "å® ¶", - "for ce", - "å¹ ´", - "fer ence", - "al a", - "Ġocc ur", - "s k", - "Ġrec ent", - "Ġc ars", - "Ġtrad itional", - "ent le", - "² Ī", - "Ġhel d", - "Ġn ach", - "ĠCent er", - "er en", - "Ġb in", - "Ù ģ", - "Ġcomm e", - "Ġre ve", - "Ġìĺ ¤", - "Ġexpect ed", - "ab il", - "Ġfocus ed", - "o v", - "Ġi P", - "or ial", - "i ro", - "Ġet c", - "am ing", - "ĠS on", - "Ġy esterday", - "Ġstr ate", - "ĠÑ Ĩ", - "Ġë ı", - "p es", - "Ġactiv ity", - "Ġadv ice", - "Ġopen ing", - "f in", - "Ġre la", - "é ĸ", - "Ġinst ance", - "ĠEvery one", - "b l", - "p en", - "Ġvis ion", - "ĠA lex", - "if orn", - "Ġt ick", - "H e", - "Ġstrate gy", - "Ġk om", - "P E", - "ĠG l", - "Ġelect ric", - "1 5", - "Ġda ily", - "Ġhus band", - "Ġst ation", - "Ġanal ysis", - "yn am", - "Ġatt empt", - "Ġbill ion", - "v ant", - "Ġfor th", - "Ġm ath", - "al y", - "Ġbehav ior", - "ĠM as", - "k an", - "ĠD ay", - "Ġbl ess", - "Ġg ut", - "ĠH igh", - "o x", - "Ġd ress", - "Ġj ed", - "è ¯", - "å ĸ", - "Ġexperien ces", - "ist a", - "Ġfight ing", - "å ·", - "ĠÑģ к", - "Ġmost ly", - "a use", - "Ġpict ures", - "ен ÑĤ", - "Ġm ad", - "Ġmod els", - "ÑĪ е", - "ĠC ount", - "Å Ħ", - "ÅĤ o", - "ep t", - "O M", - "ĠA N", - "Ġtrou ble", - "4 0", - "Ġb ird", - "ul ate", - "Ġm ur", - "Ġprodu ce", - "Ġmar ried", - "b it", - "Ġthe ory", - "í ĺ", - "Ġlead er", - "ĠL ast", - "A A", - "è µ", - "Ġim ages", - "Ġexp and", - "ĠP or", - "Ġpur ch", - "ĠS an", - "ĠChrist mas", - "ĠAust ral", - "Ġw id", - "ĠM iss", - "Ġknow ing", - "Ġz e", - "s hip", - "k u", - "Ñħ од", - "ĠInst agram", - "ĠInd ia", - "Ġest a", - "ĠCal iforn", - "Ġ7 0", - "Ġdra g", - "Ġbr ush", - "Ġn ames", - "A nd", - "Ġy o", - "ill a", - "Ġsch ed", - "Ġdest roy", - "ye ar", - "Ġv amos", - "Ġ ÙĦ", - "ç a", - "Ġforg ot", - "и е", - "Ġra ise", - "re me", - "íķ ´", - "ĠG ive", - "Ġcont ain", - "ra b", - "Ġg ift", - "ĠÑģ п", - "Ġrequ est", - "Ġsh ut", - "Ġdeg rees", - "Ġbenef its", - "Ñĭ е", - "Ġstud ies", - "Ġend s", - "Ġevery where", - "Ġher o", - "op h", - "er ry", - "Ġmaterial s", - "en ed", - "N A", - "å į", - "Ġmu y", - "Ġwor se", - "ä» Ģ", - "ĠM ad", - "Ġdec isions", - "ion e", - "Ġfore ign", - "la ughter", - "i ber", - "ени Ñı", - "ãħ ĭ", - "Ġreal ized", - "Ġ ign", - "Ġwe ak", - "ĠÎ ¼", - "Ġsca red", - "Ġass um", - "A K", - "ï ¿", - "ï¿ ½", - "Ġcover ed", - "ĠS at", - "Ġо н", - "Ġindividual s", - "Ġcomp ared", - "1 1", - "ĠAd d", - "ic les", - "Ġc ert", - "r ar", - "Ġbr ief", - "Ġactiv ities", - "Ġf ab", - "b ar", - "Ġa st", - "ĠO ther", - "Ġclass es", - "Ġo g", - "Ġmiss ing", - "ãģ ł", - "é Ŀ", - "w ers", - "× ©", - "Ġintrodu ce", - "Ġequ ation", - "ãģ¾ ãģĻ", - "Ġn om", - "Ġpain ting", - "us hing", - "ĠA P", - "Ġencour age", - "Ġsh ip", - "itt ee", - "iver se", - "ot a", - "n am", - "ãĥ »", - "Ġexerc ise", - "ĠÐ Ń", - "Ġn as", - "Ġthous ands", - "ĠCaliforn ia", - "Ġs es", - "Ġr ow", - "ŀ Ī", - "Ġpand emic", - "Ġsk ill", - "b el", - "Ġdire ctor", - "Ġmil k", - "Ġn ut", - "Ġmot ion", - "Ġcl osed", - "è ¨", - "Ġcred it", - "ah r", - "Ġche ese", - "Ġal tern", - "im ately", - "Ġs ust", - "ĠT ra", - "Ġgl ad", - "Ġhigh ly", - "Ġw a", - "Ġredu ce", - "Ġb le", - "ad or", - "in 
ated", - "ion es", - "ci ent", - "Ġdep ending", - "Ġsh aring", - "Ġca ught", - "ra el", - "Ġme hr", - "Ġpass ion", - "ç Ľ", - "Ġr u", - "Ġfar m", - "T I", - "av es", - "ĠR ob", - "ĠB ro", - "Ġmot iv", - "ret ch", - "ru pt", - "ĠB ig", - "Ġall e", - "Ġet t", - "ub s", - "ĠJapan ese", - "ĠH all", - "и ли", - "AUDI BLE", - "ç ¬", - "Ġcell s", - "ik a", - "el ine", - "il er", - "Ġì £", - "Ġsk y", - "IN AUDIBLE", - "end e", - "ap ter", - "Ġp in", - "Ġg ather", - "h ol", - "le ction", - "Ġsy n", - "Ġpl ug", - "r ound", - "Ġun iversity", - "h ib", - "Ġfant astic", - "k n", - "Ġho le", - "ĠRem ember", - "in ct", - "ak s", - "C H", - "Ġbro ken", - "Ġstr ateg", - "Ġal ive", - "Ġt ank", - "Ġc art", - "r ated", - "r ie", - "ĠSt ep", - "ĠEvery thing", - "Ġb ound", - "Ġso bre", - "Ġcustom er", - "¡ Į", - "ur g", - "ĠB ill", - "L a", - "wh at", - "Ġre action", - "Ġs ession", - "Ġpl ans", - "ĠìĿ´ë łĩê²Į", - "Ġdown load", - "ì Ļ", - "u er", - "Ġc ab", - "Ġinst r", - "if ying", - "ĠN ice", - "Ġteam s", - "ı l", - "Ġgo als", - "is ch", - "Ġtrans port", - "Ġanim al", - "Ġcost s", - "Ġcall s", - "Ġse hr", - "ì Ī", - "ri an", - "Ġd ial", - "Ġwe ather", - "๠Ģ", - "Ġв оÑĤ", - "ĠPl ay", - "Ġsh ared", - "Ġsm ooth", - "ab a", - "Ġleav es", - "à® ©", - "Ġconc ent", - "Ġsh ift", - "ĠëIJ ĺ", - "ĠGo vern", - "Ġdem onst", - "Ġbut ter", - "ĠìĹ ¬", - "Ġsat isf", - "Īë ¬", - "Ġrecogn ize", - "ĠF rench", - "Ġvol ume", - "ä nd", - "Ñĥ м", - "Ġì§ Ħ", - "ĠKe ep", - "ow a", - "ipp ed", - "ÑģÑĤ ÑĢ", - "Ġdet ect", - "ĠÏ ĥ", - "Ġl ift", - "Ġcl othes", - "ĠSt op", - "à µ", - "m et", - "Ġcl in", - "Ġar r", - "f riend", - "Ġst uck", - "Y e", - "h and", - "um a", - "Ġsc ri", - "Ġfuck ing", - "ct ors", - "× ª", - "Ġjo ining", - "Ġc ette", - "ĠØ £", - "ĠWh ite", - "Ġi hr", - "Î Ń", - "ãģ Ń", - "Ġinclud ed", - "ess o", - "Ġac ad", - "b um", - "Ġs ab", - "Ġд лÑı", - "è¿ Ļ", - "uf act", - "ĠRep ublic", - "r im", - "Ġye llow", - "Ġlim ited", - "T ER", - "ĠT y", - "Ġnot es", - "v est", - "и з", - "al ed", - "Ġph ase", - "and a", - "ĠM om", - "R I", - "Ġim mer", - "m al", - "Ġin j", - "Ġy ang", - "ud ible", - "аР³", - "Ġset t", - "Ġmag ic", - "Ġens ure", - "Ġsp ring", - "Ġsh ock", - "Ġwhe el", - "ог да", - "ãĤ Ī", - "Ġcan cer", - "Ġro ot", - "Ð IJ", - "gen cy", - "Ġë į", - "i i", - "Ġout put", - "Ġcomm it", - "Ġwork ers", - "ìķĦ ìļĶ", - "ĠÑģ ам", - "ve y", - "Ġpe u", - "Ġc ivil", - "is c", - "Ġbr ings", - "ÑĢ ав", - "an ia", - "Ä ģ", - "c raft", - "mb ol", - "Ġintell ig", - "b i", - "ac ing", - "y ou", - "Ġbecom ing", - "ĠD er", - "em a", - "å°± æĺ¯", - "Ġing red", - "Ġcomm and", - "Ġupd ate", - "Ġpre m", - "Ġopen ed", - "Ħ ¤", - "ени е", - "Ġg ard", - "Ġstat ement", - "Ġsc rew", - "Ġpr ote", - "Ġc ards", - "Ġt ask", - "Ġeven ing", - "Ġst itch", - "in en", - "ĠB er", - "m ark", - "ĠD ad", - "Ġе ÑģÑĤÑĮ", - "Ġ× ŀ×", - "ìĹ Ī", - "Ġb an", - "Ġcl im", - "Ġfre edom", - "Ġnorm ally", - "еÑģ ÑĮ", - "å ¦", - "Ġprov ided", - "Ġìŀ IJ", - "ĠìķĦ ëĭĪ", - "ĠK im", - "ied er", - "ìĿ Į", - "Ġcit iz", - "Ġb ike", - "Ġb ak", - "Ġno ise", - "Ġcl imate", - "iz es", - "å¾ Į", - "Ġincre asing", - "ĠTH E", - "Ġli qu", - "Ġperson ally", - "e f", - "res p", - "Ġleg s", - "ind er", - "Ġp ed", - "Ġë§ İ", - "Ġdep end", - "Ġvar iety", - "ĠIs rael", - "Ġwas h", - "å Ĩ", - "Ġqu iet", - "ĠJ ames", - "ĠJ ew", - "Ġfore ver", - "ĠI nt", - "Ġcoun ter", - "ur ance", - "ĠAny way", - "ca re", - "ĠOn ly", - "ci ón", - "ad i", - "ĠE v", - "ëĭĪ ê¹Į", - "ĠÎ ±", - "Ġslow ly", - "Ġо д", - "Ġnot iced", - "ier en", - "Ġfe ll", - "ĠÐ ij", - "Ġm ême", - "Ġwhen ever", - "! 
)", - "ĠH y", - "å ¼", - "ord s", - "us ion", - "ĠSt ar", - "Ġí ĺ", - "ĠM ac", - "ä¸ Ĭ", - "i ven", - "Ġìĭ ľ", - "ĠìĹ Ĩ", - "ĠT ur", - "Ġg er", - "r is", - "Ġve z", - "Ġл Ñİ", - "Ġvers us", - "ا Ø", - "ocol ate", - "Ġplan e", - "Ġz o", - "Ġsu it", - "Th is", - "Ġn erv", - "ĠA cc", - "Ñĥ ж", - "ìĤ ¬", - "n h", - "em e", - "Ġa uss", - "Ġme as", - "Ġtr ès", - "Ï ī", - "Ñģ ли", - "ĠAr t", - "ĠSec ond", - "олÑĮ ко", - "ch o", - "it ect", - "е ÑģÑĤ", - "Ġb oss", - "Ġinc ome", - "ł ¤", - "Ġsh ad", - "Ġapp ropri", - "ĠM al", - "op t", - "Ġart ist", - "Ġplay s", - "oth ers", - "ĠIn ter", - "Ġvir us", - "Ġh ung", - "Ġconst ant", - "Ġscri pt", - "Ġsn ow", - "ul f", - "k et", - "Ġdev ices", - "Ġmet al", - "ight s", - "ìĦ ¸", - "Ġsal es", - "Ġve get", - "Ġcollect ion", - "Ġv ia", - "k er", - "Ġgot ten", - "O W", - "i én", - "Ġacc ur", - "Ġw ave", - "ult y", - "ĠA ir", - "Ġlead ing", - "ic ing", - "Ġcent ral", - "ĠChrist ian", - "f r", - "ĠAl though", - "Ġsong s", - "Ġf if", - "нÑĭ Ñħ", - "Ġbel ong", - "oss ible", - "ì °", - "Ġphot os", - "is l", - "Ġrela x", - "s a", - "US IC", - "ê ·", - "Ġman ufact", - "ĠTw itter", - "Ġdanger ous", - "Ġhy d", - "le ar", - "i ant", - "ĠâĢ ¦", - "Ġsudden ly", - "Ġla ugh", - "Ġang le", - "ĠG ot", - "Ġwor ried", - "о е", - "Ġp ap", - "ĠM art", - "en o", - "Ġbatter y", - "Ġп оÑģ", - "Ġlight s", - "Ġar ms", - "ĠA bs", - "m es", - "âĢ ĵ", - "use um", - "Ġte a", - "ĠM ic", - "Ġfor mer", - "ograph y", - "Ġapplic ations", - "ĠD ire", - "çĦ ¶", - "Ġfeed back", - "itch en", - "yor um", - "u ed", - "ig t", - "Æ° á»", - "os ition", - "ĠD el", - "Ġíķ ĺë", - "ĠB ack", - "ad s", - "Ġpr ime", - "ì£ ¼", - "ì£ ł", - "× ij", - "Ġm ut", - "] .", - "ĠÐ Ĺ", - "lo c", - "k in", - "Ġexper t", - "Ġal right", - "ung s", - "Ġsupp ly", - "Ġleaders hip", - "ĠF ra", - "Ġtyp ically", - "Ġs el", - "Ġtre es", - "Ġ2 2", - "h ar", - "Ġwor st", - "Ġbus y", - "ant o", - "ĠU p", - "ĠB as", - "Ġpresent ation", - "Ġstr ange", - "Ġth in", - "ÑĤ е", - "Ġveh icle", - "Ġд о", - "cell ent", - "7 0", - "Ġt ired", - "Ġcris is", - "Ġt iny", - "as y", - "Ġr an", - "é ĩ", - "Ġfor ces", - "Ġо Ñĩ", - "Ġident ify", - "Ġass ess", - "иÑĤ е", - "S E", - "Ġcreat ive", - "ç Ł", - "Ġdep artment", - "Ġinit ial", - "æĪij åĢij", - "ĠD am", - "ak t", - "v ere", - "Ġinf ect", - "Ġp ump", - "Ạ¡", - "Ġv iel", - "Ġr are", - "Ġd ot", - "ash ion", - "em pl", - "Ġf lex", - "Ġk on", - "Ġtr uck", - "Ġle ct", - "Ġpl astic", - "la w", - "Ġlik es", - "Ġr ough", - "ĠM AT", - "í ŀĪ", - "Ġcomm er", - "Ġas se", - "Ġc ake", - "Ġact ions", - "Ġad m", - "Ġother wise", - "ĠHe alth", - "Ġcoll e", - "à¹Ģ à¸", - "Ġr ub", - "å¾ Ĺ", - "æ Ķ", - "Ġsc r", - "Ġz um", - "ĠH im", - "Ġch amp", - "Ġconcern ed", - "Ġ5 00", - "Ġpl ate", - "ĠO ut", - "Ġdon c", - "Ġequip ment", - "Ġta ught", - "ll ed", - "Ġí Ļ", - "iv a", - "Ġmot or", - " »", - "Ġgu ide", - "å ī", - "Ġstop ped", - "Ġr at", - "Ġlab or", - "Ġa im", - "Ġprep are", - "ĠÑ Ī", - "Ġshoot ing", - "ann ed", - "cri pt", - "Ġen emy", - "Ġdep ends", - "Ġn av", - "Ġb er", - "Ġland s", - "Ġun ivers", - "i u", - "Ġfact or", - "ok ing", - "Ġcar bon", - "b ut", - "ĠL ove", - "el d", - "ĠÎ µ", - "Ġg a", - "Ġé s", - "Ġbre ad", - "Ġvol t", - "í Ĭ", - "Ġwas te", - "Ġkeep s", - "æī Ģ", - "Ġst or", - "Ġhon or", - "Ġun less", - "Ġcol um", - "Ġë ĮĢ", - "Ġpl ants", - "Ye ah", - "Ġinclud es", - "ä¸ Ń", - "Ġo x", - "Ġpe ut", - "ë§ Į", - "ìĥ ģ", - "ist ry", - "ภ±", - "ĠDep artment", - "ant a", - "Ġfing er", - "Ġst retch", - "Ġsy mbol", - "Ġneigh bor", - "æ ¬", - "ê° Ħ", - "~ ~", - "ĠÑĤ Ñĭ", - "ĠA ber", - "k es", - "Ġmass ive", - 
"ĠC H", - "ĠS al", - "× ł", - "ãĤ Ĵ", - "Ġd ynam", - "ach e", - "ĠP re", - "Ġmon itor", - "ent ed", - "E O", - "Ġrais ed", - "ist ics", - "Ú ©", - "Ġv ou", - "it en", - "¡ °", - "Ġbusiness es", - "Ġe arn", - "Ġmob ile", - "id ade", - "Ġha be", - "y r", - "l ict", - "Ġcon duct", - "Ġfed eral", - "Ġw o", - "b u", - "Ġn one", - "Ġteach ers", - "ĠاÙĦ Ø", - "éģ ĵ", - "id ents", - "ا ÙĦ", - "Ġtre nd", - "еР¶", - "Ġal bum", - "Ġm ich", - "b ased", - "ภµ", - "Ġtrans ition", - "Ġн о", - "õ es", - "h ost", - "ed y", - "ĠPro f", - "p an", - "ij n", - "Ġcapac ity", - "und o", - "Ġ× ij×", - "Ġbreat h", - "Ġм ен", - "Ġm ü", - "í Ļ", - "ĠA ut", - "hing ton", - "Ġn or", - "Ġg ain", - "po int", - "Y es", - "ĠØ ª", - "ĠN a", - "Ã¥ r", - "Ġi ç", - "ĠM ary", - "Ġsp in", - "Ġant i", - "åIJ §", - "Ġsome how", - "Ġlaw s", - "Ġmom ents", - "Ġg re", - "Ġmo ves", - "ĠW ould", - "Ġpred ict", - "Ġv ra", - "Ġ201 9", - "¶ Ħ", - "Ġfund ament", - "2 5", - "Ġp ure", - "Ġw ow", - "Ġis land", - "Ġinvest ment", - "Ġb ath", - "ĠY a", - "Ġhard er", - "Ġt ips", - "å Ĺ", - "Ġelect ron", - "ĠB ob", - "Ġb ond", - "od ies", - "ĠA ug", - "Ġgib t", - "Ġch air", - "Ġtw ice", - "w ood", - "Ġcl ar", - "Ġmas k", - "Ġhonest ly", - "Ġ201 8", - "t ies", - "' ,", - "Ġp ens", - "Ġsurpr ised", - "Ġcommunic ation", - "ãģ£ ãģ¦", - "Ġsp r", - "Ġwh ose", - "Ġst ars", - "× IJ×", - "ĠâĢ ĭ", - "Ġproper ly", - "Ġg rew", - "os ing", - "Ġdi vers", - "A D", - "Ġem pt", - "Ġexp ression", - "Ạ¿", - "ĠP al", - "ãģ Ĭ", - "Ġjust ice", - "Ġp air", - "w o", - "Ġse at", - "or ter", - "Ġlink s", - "ĠM er", - "Ġre nd", - "но е", - "up id", - "ĠH el", - "ĠM arch", - "ĠL o", - "Ñģ ÑĮ", - "Ġhas n", - "Ġev alu", - "ãģ ı", - "å¤ ©", - "il os", - "Ġfund ing", - "Ġv en", - "u an", - "ĠM aster", - "ĠO l", - "ĠF re", - "Ġy ap", - "ĠS ir", - "s ch", - "Ġmist ake", - "am an", - "Ġdin ner", - "ĠWas hington", - "Ġorganiz ations", - "Ġж е", - "av ing", - "Ġv ÃŃ", - "Ġbirth day", - "Ġbe ar", - "ĠÙ ģ", - "Ġaff ord", - "Ġre ven", - "Ġrelationship s", - "r ough", - "ĠT ime", - "Ġt ag", - "ĠS un", - "u ary", - "ĠP o", - "c ar", - "ab ilities", - "Ġpr ison", - "Ġl ic", - "ìł ķ", - "id den", - "Ġspec ies", - "é »", - "Ġf irm", - "Ġsc ore", - "Ġd it", - "Ġspe ct", - "Ġp el", - "Ġcompl icated", - "æ¨ £", - "Ġr ank", - "Ġoppos ite", - "Ġpick ed", - "Ġк он", - "el er", - "Ġm ig", - "ĠS l", - "ĠN et", - "Ġne ck", - "ĠFr ance", - "Ġtechn ical", - "ภ¡", - "Ġmil es", - "Ġprim ary", - "Ġse in", - "s es", - "Ġla ughs", - "b ra", - "ÅĽ ci", - "ri age", - "Ġn ic", - "et ers", - "Ġà ª", - "olog ies", - "ĠI S", - "r ad", - "ud o", - "ı nd", - "m ar", - "Ġex ch", - "Ġcompet ition", - "Ġauss i", - "ĠS erv", - "Ġre nt", - "Ġch ocolate", - "Ġw ieder", - "Ġnear ly", - "Ġspe ech", - "Ġun c", - "Ġpar am", - "ĠBrit ish", - "Ġrem ain", - "ภģ", - "ur t", - "ĠØ ¹", - "Ġcr ack", - "ail s", - "Ġprom ise", - "Ġpay ing", - "i ÃŁ", - "Ġad apt", - "ал а", - "Ġmov ies", - "Ġw ire", - "Ł ¬", - "æľ ĥ", - "Ġter rible", - "Ġs ó", - "Ġperfect ly", - "åij ¢", - "ord in", - "Ġj á", - "Ġimp ossible", - "ĠTh ree", - "Ġn h", - "Ġtur ning", - "r um", - "ĠB el", - "ig g", - "Ġrespons ible", - "и й", - "Ġincred ibly", - "w i", - "ian o", - "Ġhum ans", - "Ġà ĩ", - "Ġsetting s", - "Ġj oy", - "o ot", - "Ġdeal ing", - "ill ed", - "Ġsur round", - "Ġfollow ed", - "Ġposs ibly", - "Ġinit i", - "st en", - "Ġpr os", - "Ġcand id", - "Ġass ign", - "Ġviol ence", - "W ell", - "Ġr ise", - "P S", - "Ġtamb ém", - "Ġë ĵ¤", - "i ance", - "y an", - "Ġaud io", - "ĠB et", - "ĠAmeric ans", - "ĠAs s", - "is chen", - "ìŀ ħ", - "Ġult imately", - "Ġpol 
ic", - "Ġmajor ity", - "éĢĻ åĢĭ", - "ĠFin ally", - "er ap", - "Ġgu ard", - "ĠMAT T", - "Ġbr own", - "м и", - "Ġch a", - "ĠHo ly", - "Ġnerv ous", - "ipp ing", - "ÄĻ d", - "ĠS a", - "ĵ ľë", - "¶ Ģ", - "l ie", - "çľ Ł", - "Ġn uc", - "ĠA pr", - "é Ľ", - "ĠKore a", - "eg o", - "ĠCan ada", - "Ġkön nen", - "Ġcomp ar", - "Ġg anz", - "ĠM ais", - "Ġthem e", - "Ġk i", - "Ġdraw ing", - "az on", - "ĠO ff", - "t t", - "ĠW ind", - "Ġtod os", - "Ġob vious", - "на Ñı", - "I M", - "ĠÐ ł", - "we ll", - "Ġbl ow", - "Ġho ok", - "Ġcir cle", - "Ġë³ ´", - "Ġarch itect", - "ĠK r", - "Ġc ó", - "Ġprotect ion", - "eg a", - "å ĩ", - "Ġwatch ed", - "Ġans wers", - "Ġdi et", - "iv o", - "Ġpow der", - "Ġyour s", - "Ġhigh est", - "çĤ º", - "F F", - "å º", - "Ġbo ys", - "ö yle", - "Ġl unch", - "è¬ Ŀ", - "ĠI I", - "Ġset s", - "Ġmo le", - "Û ģ", - "Ġwin ter", - "Ġluck y", - "Ġrespons ibility", - "Ġsign al", - "Ġwond ering", - "Ġa x", - "Ġcook ing", - "ов оÑĢ", - "le g", - "Ġп оÑĤ", - "Ġsurpr ise", - "Ġdem ocr", - "Ġlo op", - "Ġj ag", - "Ġcur ious", - "Ġmarket ing", - "Ð Ŀ", - "ar on", - "ĠApp le", - "Ġvirt ual", - "Ġ19 8", - "no on", - "ĠM et", - "оÑģ ÑĤо", - "об Ñĭ", - "it u", - "ĠA w", - "Ġbu ying", - "Ġrestaur ant", - "ĠB ud", - "Ġdou bt", - "Ġgr ant", - "Ġver d", - "Ġc ash", - "Ġfac ulty", - "Th at", - "ĠE in", - "å¤ ļ", - "Ġw ed", - "it ness", - "ĠM ag", - "n el", - "Ġn arr", - "Ġacc ident", - "Ġmed ium", - "em ents", - "Ġcr ow", - "n ight", - "ìĿ ¼", - "ä¹ Ł", - "Ġlibr ary", - "аÑİ ÑĤ", - "Ġtamb ién", - "Ġrefer ence", - "Ġfour th", - "h ouse", - "v ention", - "Ġfill ed", - "ĠC our", - "ib r", - "Ġn g", - "Ġdevelop ing", - "Ġprov ides", - "Ġpo ll", - "Ġtra ffic", - "arent ly", - "à® Ł", - "Ġform s", - "Ġcl ient", - "Ġg entle", - "Ġmus s", - "ĠCong ress", - "ĠInd ian", - "ce an", - "Ġp il", - "Ġc zy", - "st ood", - "ut y", - "Ġn ä", - "Ġsp ending", - "Ġconst ruction", - "ina udible", - "Ġë§ Ī", - "Īë¬ ´", - "Ġìĥ Ŀ", - "om a", - "os en", - "ag o", - "Ġlar gest", - "ãħĭ ãħĭ", - "Ġun iverse", - "b es", - "os a", - "Ġе го", - "Ġd ude", - "ĠM AR", - "Ġind eed", - "ε ι", - "Ġman aged", - "ĠSh ould", - "S o", - "Ġappl ied", - "Ġfair ly", - "ĠD en", - "Ġanal y", - "Ġconst antly", - "Ñģ п", - "H ow", - "ĠS ay", - "en cies", - "ĠP C", - "Ġegg s", - "à® °", - "Ġet h", - "ĠEnt ão", - "in ar", - "i ot", - "Ġc z", - "ĠEurope an", - "ãģ Ī", - "ĠA M", - "Ġc á", - "Ġrad io", - "§ Į", - "Ġh ide", - "ä» Ĭ", - "ĠSt art", - "Ġcl ub", - "ĠH ope", - "Ġeff orts", - "lus ion", - "Ġc ities", - "h one", - "Ġreach ed", - "Ġgu id", - "ro id", - "Ġhar m", - "Ġcut ting", - "Ġb ul", - "1 8", - "i est", - "ĠMe x", - "Ġ iron", - "çŁ ¥", - "Ġafter noon", - "Ġha ll", - "Ġpr zy", - "Ġg osh", - "Ġinflu ence", - "Ġв ид", - "Ġincre ased", - "ĠMin ister", - "Ġdis ci", - "ĠP eter", - "Ġver t", - "Ġmen u", - "Ġse lling", - "ur ally", - "Ġqu ote", - "Ġ ¡", - "Ġcontin ues", - "mp re", - "ĠÅŁ ey", - "it ution", - "Ġна Ñģ", - "c les", - "ĠGerm an", - "c zy", - "ĠÐ £", - "B e", - "Ġk itchen", - "ĠT ry", - "i pe", - "Ġic on", - "ar p", - "Ġprov iding", - "ĠTr ans", - "Ġtechn ique", - "Ġh är", - "Ġinf rast", - "Ġsus p", - "ü ck", - "ic ip", - "ĠÐ ķ", - "Ġc in", - "ìĸ ´ë", - "Ġpr z", - "Ġcompon ent", - "Ġby e", - "ĠB ible", - "iz er", - "C h", - "Ġsol utions", - "Ġaccom pl", - "Ġ201 6", - "I E", - "ĠT a", - "Ġass ume", - "Ġliqu id", - "Ġë¨ ¹", - "Ġquar ter", - "Ġfem ale", - "ĠTh ink", - "Ġstat us", - "it ute", - "Ġco ach", - "Ġre in", - "Ġcomb ination", - "è ·", - "ĠT er", - "Ġobject s", - "Ġdist rict", - "Ġmake up", - "Ġmur der", - "w as", - "f en", - "Ġbow l", - "Ġpub 
lished", - "Ġsp orts", - "ãģ ¡", - "Ġident ity", - "Ġseem ed", - "Ġact ing", - "л Ñİ", - "ri x", - "Ġup load", - "Ġh ast", - "Ġbo at", - "ĠM od", - "ri o", - "Ġ =", - "Ġcy cle", - "¯ ¸", - "Ġl oud", - "ust ed", - "com ing", - "Ġ201 7", - "Ġon t", - "Ġleg isl", - "Ġst ruct", - "ĠSomet hing", - "Ġconf lict", - "Ġu pper", - "Ġman ager", - "Ġm ort", - "Ġf ra", - "ĠÄ °", - "ĠM ike", - "ĠW ork", - "Ġn ó", - "ph ere", - "ĠìĤ ¬ë", - "ĠL and", - "Ġfil ter", - "Ġprom ot", - "æ °", - "æĻ Ĥ", - "ķ ¼", - "Ġrecord ing", - "× Ŀ", - "Ġassoci ated", - "Ġf uel", - "und er", - "Ġele ction", - "Ġemploy ees", - "ĠCom p", - "ÑĢÑĥ г", - "ĠW o", - "ro l", - "Ġsa ved", - "ĠH on", - "ĠV i", - "åĪ Ĩ", - "ac a", - "p ret", - "Ġw et", - "Ġst upid", - "Ġl ad", - "Ġf est", - "Ġw ake", - "Ġи н", - "Ġgreat est", - "ĠJ im", - "Ġserious ly", - "Ġì ¹", - "Ġfeel ings", - "Ġ3 00", - "i ation", - "Ġbeaut y", - "Ġìŀ ĺ", - "Ġs an", - "ĵ ł", - "Ġ- (", - "Ġcons cious", - "Ġд ел", - "b ye", - "ç Ļ", - "M an", - "Ġlet s", - "Ġsho es", - "y d", - "ä¹ Ī", - "Ġdisapp e", - "ĠCount y", - "ĠSc ott", - "Ġbut t", - "Ġaqu ÃŃ", - "Ġconf ig", - "resp ond", - "LAU GH", - "© ëĭĪëĭ¤", - "Ġdivid ed", - "Ġac qu", - "Ġz one", - "Ġk omm", - "a ção", - "ì§ ľ", - "c ut", - "Ġ2 3", - "Ġmaxim um", - "ro g", - "Ġrun s", - "Ġcompon ents", - "Ġarri ved", - "Ġconf ident", - "ÑĢ ов", - "Ġhe ight", - "Ġpro ced", - "E M", - "ĠÐŃ ÑĤо", - "ĠM en", - "Ġtalk s", - "Ġconf idence", - "ĠChr is", - "Ġlead s", - "Ġn ose", - "f all", - "b b", - "ĠNot hing", - "is er", - "Ġindepend ent", - "Ġmin or", - "Ġsy m", - "l en", - "ci ence", - "Ġf ashion", - "Ġsex ual", - "Ġb un", - "h ere", - "Ġso il", - "Ġdies e", - "Ġsh ap", - "Ġempt y", - "Ġjour nal", - "ag on", - "ĠThe ir", - "Ġweek end", - "ÃŃ t", - "Ġer ror", - "Ġn ar", - "à ¸", - "è ©", - "an cy", - "Ġìķ Ĭ", - "Ġfore st", - "Ġha cer", - "Ġmiss ed", - "ãģ ķ", - "åı¯ 以", - "Ġev il", - "Ġstor age", - "Ġsing ing", - "in ha", - "Ġkn ock", - "Ġimp ress", - "ĠоÑĩ енÑĮ", - "ĠGo ld", - "ĠS ur", - "ĠP ort", - "åİ »", - "ĠL ond", - "Ġfaz er", - "ot y", - "ot o", - "Ġan x", - "ĠWill iam", - "Ġexist ing", - "pl ace", - "ĠC D", - "Î ³", - "ĠColl ege", - "l or", - "ĠE ast", - "s en", - "f ach", - "o ft", - "Ġexperien ced", - "Ġlo ves", - "im m", - "Ġpo ly", - "Ġes se", - "ì ¤", - "ĠG rand", - "è §", - "ch er", - "Ġvict im", - "ĠG es", - "л ÑĮ", - "v ision", - "Ġt all", - "Ġl ens", - "Ġз на", - "ĠB oth", - "Ġì ²", - "Ġsust ain", - "Ġarg ument", - "Ġfact ors", - "Ġautom atically", - "Ġfr uit", - "Ġli ber", - "Ġa le", - "ĠP ress", - "ĠB a", - "ĠÐ ³Ð¾", - "Ġhundred s", - "th at", - "ĠR ich", - "Ġreci pe", - "ĠI T", - "è ĩ", - "Ạ¥", - "Ġdescri be", - "Ġdri ver", - "ĠO ct", - "ĠM at", - "д е", - "Ġme al", - "Ġlat est", - "Ġth erap", - "Ġcomp are", - "ĠAm azon", - "Ġì¢ Ģ", - "ĠRuss ia", - "Ġstr ing", - "Ġk a", - "ĠComm un", - "Ġd ia", - "I s", - "Ġmill ions", - "Ġcor por", - "Ġcor respond", - "Ġfix ed", - "ĠJo e", - "Ù İ", - "Ġview s", - "Ġr iver", - "Ġstud io", - "ig ger", - "Ġfl avor", - "Ġpres ence", - "Ġun its", - "Ġsa ving", - "av our", - "Ġp esso", - "or ith", - "Ġh ers", - "ĠN at", - "as ion", - "ĠFr ank", - "о ÑĪ", - "ÅĤ y", - "í Ħ", - "Ġein em", - "Ġfun ctions", - "um an", - "Ġn orth", - "Ġìł Ħ", - "Ġhor se", - "v id", - "Ġple asure", - "а ÑĪ", - "é es", - "ind a", - "Ġt ail", - "Ġexpl ore", - "S T", - "Ġcommer cial", - "ĠD uring", - "ar l", - "] :", - "f it", - "Ġr ates", - "æ ³", - "M USIC", - "Ġhous ing", - "Ġein er", - "Ġsitu ations", - "æ ĭ", - "Ġdec re", - "Ġappropri ate", - "ен но", - "% .", - "Ġb ac", - "Ġw at", - "ens ity", - 
"ä h", - "kn own", - "it z", - "Ġemot ional", - "erv ation", - "Ġbl ind", - "1 6", - "í ĥ", - "大 家", - "Ġjo ined", - "Ġloc ated", - "ĠÑģ м", - "ad as", - "ber g", - "Ġd ess", - "Ġde ar", - "ed en", - "c os", - "Ġad opt", - "1 00", - "ow e", - "ĠChe ck", - "ism o", - "Ġsim pl", - "Ġang ry", - "Ġмен Ñı", - "ĠC am", - "Ġp ad", - "Ġatt end", - "Ġsam ple", - "æĹ ¥", - "Ġì Ľ", - "ĠI N", - "ul ous", - "ĠS ar", - "ĠSh ow", - "Ġinfrast ructure", - "ĠAug ust", - "Ġless on", - "Ġn iet", - "æ İ", - "Ġfo i", - "Ġbro ke", - "t r", - "ç ķ", - "Ġ4 5", - "Ġg ew", - "Ñĥ п", - "at i", - "Ġmaint ain", - "Ġart ists", - "ing er", - "æĿ ¥", - "er ved", - "I A", - "Ġequ als", - "Ġoper ation", - "ill y", - "ĠëĤ ´", - "Ġcrow d", - "Ġintern al", - "Ġtest s", - "ĠR ock", - "ĠC ons", - "ĠëĦ Ī무", - "w ar", - "Ġs ou", - "Ġch art", - "ĠJ une", - "ĠApr il", - "g ent", - "Ġv ent", - "Ġqu and", - "ĠKore an", - "im o", - "ç ī", - "id ers", - "Ġmount ain", - "ÑģÑĤ ав", - "æľ Ī", - "ij k", - "Ġdiscover ed", - "ĠS und", - "ĠS il", - "Ġso lo", - " ´", - "Ġsch ol", - "ĠE ach", - "ç µ", - "Ġb are", - "Ġí Į", - "ĠvÃŃ de", - "Ġingred ients", - "ĠIt s", - "Ŀ¼ ê³ł", - "Ġì Ĭ", - "Ï į", - "ĠLe e", - "Ġsc ary", - "Ġprinci p", - "Ġspirit ual", - "ì ħ", - "ĠH old", - "æ²Ĵ æľī", - "Ġdef ine", - "ĠL es", - "ĠN or", - "ĠE nd", - "Ġbl og", - "ĠG reen", - "аеÑĤ ÑģÑı", - "p art", - "el es", - "äº ĭ", - "ĠUnd er", - "Ġpart e", - "Ġ3 5", - "Ġse ctor", - "ĠS ept", - "Ġaut h", - "à® ®", - "om in", - "Ġcl ients", - "Ġc i", - "ĠFr iday", - "er as", - "Ġtw e", - "ul ated", - "Ġcult ural", - "ĠÑģв о", - "Ġëį Ķ", - "Ġà º", - "Ġpar ce", - "à® ²", - "Ġtrad ition", - "Ġjud ge", - "ĠGen eral", - "Ġdeterm ine", - "ĠIs n", - "ĠP L", - "ne ath", - "Ġmatter s", - "íķ ´ì", - "! ]", - "а Ñħ", - "Ġpo ol", - "Ġvari able", - "Ġvacc ine", - "Ġcaus ed", - "Ġw est", - "ĠY ep", - "f ast", - "Ġph ilos", - "hor a", - "Ġcontinu ed", - "Ġunf ortunately", - "ãģ į", - "æ ķ", - "Ġfl ight", - "Ġw rap", - "Ġhu h", - "ĠAbs olutely", - "Ġp ink", - "Ġrem ains", - "Ġn é", - "Ġf le", - "ĠS ol", - "Ġlos ing", - "Ġalg orith", - "Ġrequ ires", - "Ġfound ation", - "ĠB ur", - "Ġprofess ion", - "ĠM id", - "Ġë ŃIJ", - "c an", - "ĠM il", - "Ġyoung er", - "Ġappe ars", - "ter m", - "íķĺ ê³ł", - "ac le", - "ĠLond on", - "Ġengine ering", - "ภ¢", - "Ġadv ent", - "ìĦ¸ ìļĶ", - "Ġê¸ °", - "ĠM aj", - "ÑĢ ем", - "ing u", - "ĠU K", - "u ro", - "s pe", - "Ġt ent", - "Ġreport ed", - "ĠA L", - "H ey", - "Ġë§ IJ", - "Ġd ent", - "ĠAustral ia", - "ĠJan uary", - "³ ´", - "ag ues", - "ars h", - "r ig", - "Ġtien e", - "ภ£", - "Î ®", - "Ġmach en", - "un te", - "Ñĥ Ñģ", - "Ġelect r", - "Ġtut orial", - "Ġpl aced", - "ĠìĿ´ ê±°", - "ĠCoun cil", - "í ĸĪ", - "°ë ¦¬", - "ah ren", - "Ġê·¸ë ŀĺ", - "Ġpro ve", - "f ol", - "Ġqu er", - "Ġche ap", - "ĠF ather", - "ĠP ower", - "ĵ ľ", - "Ġpur s", - "Ġes p", - "ĠB re", - "ê¸ °ë", - "om as", - "æĥ ³", - "ил ÑĮ", - "Ġge ht", - "os ter", - "ê³ ¼", - "Ġfil es", - "ĠÐ §", - "be ll", - "Ġwh om", - "Ġë ĺ", - "Ġex cellent", - "Ġdat ab", - "Ġg ö", - "Ġì§Ħ ì§ľ", - "Ġbelie f", - "j et", - "Ġj ack", - "Ġsw im", - "ri al", - "um in", - "a uc", - "Ġso ll", - "Ġess ential", - "íķĺ ëĬĶ", - "Ġev ol", - "cha ft", - "ain e", - "th let", - "Ġinc or", - "Ġreport s", - "Ġdefin ition", - "ke l", - "Ġcirc um", - "Ġprodu ced", - "Ġ× Ľ", - "ant ic", - "n et", - "Ġa ward", - "Ġd urch", - "Ġtrans p", - "Ġm ale", - "¦ ¬ë", - "Ġmo on", - "ĠGe orge", - "Ġfly ing", - "i ó", - "Ġs ources", - "Ġpl enty", - "ĠDem ocr", - "R O", - "Ġ 00", - "Ġsec ure", - "ĠB ir", - "ra in", - "Ġz ur", - "Ġeffic ient", - "Ġrepe at", 
- "Ġmethod s", - "Ġcal m", - "Ġdiscuss ed", - "ĠìŀĪ ëĬĶ", - "Ġser ver", - "an ie", - "ĠInst ead", - "Ġide al", - "Ġcon ven", - "Ġhop ing", - "ĠT or", - "Ġdep th", - "Ġhe aven", - "EN CE", - "Ġhab it", - "gr ad", - "Ġfl ag", - "Ġin e", - "Ġk h", - "ĠL I", - "Ġfac ing", - "ĠA U", - "ĠT im", - "Ġg em", - "ĠJ ul", - "Ġel a", - "iz za", - "Ġfe llow", - "Ġqu el", - "Ġsp oke", - "Ġcitiz ens", - "u ge", - "é ĥ½", - "Ġp ages", - "Ġf asc", - "Ġrelig ious", - "at en", - "Ġch apter", - "ĠV al", - "Ġcons ult", - "ĠM ill", - "g l", - "op er", - "Ġinf in", - "Ġmar riage", - "Ġmedic ine", - "Ġд в", - "Ġdog s", - "Ġinstr ument", - "ĠEx act", - "á n", - "Ġ20 21", - "Ġf er", - "Ġwe alth", - "Ġgr ade", - "Ñĭ Ñħ", - "Ġcr ime", - "Ġth read", - "Ġess a", - "Ġw ine", - "co hol", - "ph a", - "ภĩ", - "og ue", - "Ġins urance", - "arr ator", - "ĠSept ember", - "Ġv id", - "ĠSp irit", - "Ġg est", - "ĠRuss ian", - "Ġproper ties", - "Ġart icle", - "Ġunder neath", - "y er", - "Ġjo int", - "Ġrelative ly", - "Ġin ch", - "Ġdesp ite", - "ĠG ree", - "Ġclass ic", - "Ġsupport ing", - "Ġinst ruct", - "lus ive", - "Ġdi agn", - "æ Ĭ", - "Ġadminist ration", - "аб оÑĤ", - "ĠO pen", - "æīĢ 以", - "Ġп ок", - "Ġdoll ar", - "Ġconse qu", - "o ber", - "ĠGerm any", - "Ġter r", - "ĠQ U", - "ĠÐ ĵ", - "ç ¾", - "Ġstrong er", - "É Ļ", - "ĠÙ Ĭ", - "ĠiP hone", - "Ġfab ric", - "ü h", - "Ġen em", - "æ ¯", - "Ġsub t", - "E E", - "ond e", - "Ġcre w", - "Ġremo ved", - "Ġl ady", - "Ġpot entially", - "ĠÐĿ о", - "y al", - "Ġsym pt", - "Ġar my", - "Ġintrodu ced", - "t es", - "Ġaspect s", - "1 4", - "ĠL ou", - "Ġ )", - "Ġde ploy", - "p et", - "Ġh an", - "ĠW atch", - "Ġweap ons", - "Ġph en", - "Ġreg ister", - "Ġein fach", - "Ġsp ort", - "Ġbr idge", - "Ġin ner", - "Ġminim um", - "Ġw itness", - "Ġes o", - "Ġvill age", - "Ġown er", - "¦¬ ê³ł", - "Ġsc ream", - "il ed", - "Ġp itch", - "b ru", - "Ġadv ance", - "ä¸į æĺ¯", - "Ġsupp ose", - "ĠAt t", - "еÑĤ ÑģÑı", - "Ġdiffer ences", - "ak ed", - "Ġinter pret", - "à ¦", - "iend o", - "Ġabs ol", - "ĠбÑĥд еÑĤ", - "Ġë ²", - "Ġtri al", - "Ġthink s", - "ly ing", - "cept ion", - "ĠAfric an", - "Ġchem ical", - "Ġta pe", - "Ġconvers ations", - "Ġdistrib ution", - "t i", - "ĠA I", - "Ġfl ash", - "Ġunder stood", - "ĠGovern ment", - "å° ı", - "! 
?", - "ĠS k", - "ê± °ë", - "ri er", - "T S", - "ĠAcc ording", - "Ñİ ÑĤ", - "Ġsp ons", - "ÑĤ обÑĭ", - "Ġval u", - "ere m", - "icht ig", - "Ġresist ance", - "ĠG al", - "ger y", - "Ġbeg ins", - "Ġadv anced", - "Ġrele vant", - "Ġpolit ics", - "ĠF am", - "Ġç ok", - "ĠN ever", - "ill ing", - "Ġfoot ball", - "и и", - "ĠI D", - "ĠAfric a", - "Ġfing ers", - "Ġб олÑĮ", - "Ġà ¡", - "Ġcl ip", - "ĠL at", - "ãĤ Ħ", - "Ġì§Ģ ê¸Ī", - "es se", - "Ġvo or", - "Ġas ide", - "æ ŀ", - "Ġto ward", - "Ġb at", - "Ġval id", - "ĠM ens", - "Ġcomplet ed", - "ı ÄŁ", - "Ġpod cast", - "ĠB on", - "Û Ĵ", - "ĠJ uly", - "il a", - "Ġpack age", - "Ġpull ed", - "ch ar", - "ĠM el", - "o is", - "Ġs outh", - "Ġë Ķ", - "Ġimport ance", - "Ġp ushing", - "Ġis ol", - "Ġstand s", - "c ill", - "ä ¼", - "Ġ ðŁ", - "or i", - "ê° ģ", - "Ġhom es", - "Ġconcern s", - "Ġb iz", - "å ½", - "b ie", - "Ġb is", - "Ġge ar", - "ĠM S", - "Ġh un", - "ĠM att", - "Ạ£", - "se y", - "ĠSec ret", - "Ġod d", - "ĠM ax", - "oll y", - "f ord", - "ĠS H", - "Ġrepl ace", - "Ġnav ig", - "Ġin i", - "и Ñı", - "Ġgi ant", - "Ġma nd", - "ĠH app", - "TI ON", - "g un", - "iam o", - "ìŀħ ëĭĪëĭ¤", - "Ġg ap", - "Ġê tre", - "Ġclass room", - "Ġhy p", - "ak i", - "è ®", - "is ters", - "ack s", - "ĠÑģ о", - "Ġb ug", - "Ġgra v", - "am in", - "Ġevery day", - "Ġì ¡°", - "Ġgard en", - "ce mber", - "Ġest o", - "åĹ İ", - "Ø ¬", - "Ł °", - "å ģ", - "Ġr om", - "Ġìłľ ê°Ģ", - "Ġfall ing", - "Ġfa ult", - "ell y", - "Ġch est", - "Ġл и", - "Ġpot ato", - "Ġbuild ings", - "Ġoper ating", - "Ġp are", - "w r", - "D on", - "ĠF our", - "Ġv ul", - "Ġl á", - "Ġfr ust", - "ĠD ann", - "ol es", - "ny a", - "Ġì ¶", - "ĠÑĢ аÑģ", - "× Ľ", - "Ġa ÃŃ", - "w ord", - "Ġweap on", - "Ġob t", - "ĠF all", - "ĠSte ve", - "Ġmix ed", - "Ġp ode", - "ĠA S", - "ĠL eg", - "Ġdes c", - "Ġspl it", - "Ġemer gency", - "ĠS ing", - "Ġprof it", - "Ġtyp ical", - "ĠDon c", - "Ġannoun ce", - "ĠTe x", - "Ġsac r", - "tern al", - "Ġcomm ittee", - "ig o", - "Ġdi am", - "ph as", - "Ġdef e", - "ĠProf ess", - "Ġdec l", - "Ñĥ ÑĢ", - "2 2", - "ol f", - "ĠM ond", - "u y", - "Ġa y", - "Ġl em", - "Ġlove ly", - "ĠC ould", - "Ġgu ar", - "H H", - "Ġcare fully", - "ĠL isten", - "Ġк ÑĢ", - "Ġyou th", - "ĠThere fore", - "Ġdream s", - "ĠJe ff", - "? 
]", - "Ġë Ī", - "D A", - "Ġb odies", - "au x", - "Ġtechn iques", - "Ġmechan ism", - "× ĵ", - "Ġо ни", - "Ġdes ire", - "à ®", - "ĠV o", - "qu es", - "ĠÑĥ же", - "ĠWho a", - "ĠG ame", - "Ġh al", - "an ish", - "Ġpract ices", - "5 00", - "Ġsort s", - "up s", - "ate ful", - "Ġhers elf", - "Ġgu itar", - "Ġprop os", - "Ġsit es", - "Ġbe ach", - "Ġ× ¢", - "ç¬ ¬", - "н Ñĥ", - "Ġdr am", - "ĠNo ve", - "V E", - "r ant", - "Ġpl ot", - "ĠìŬ 기", - "ĠC a", - "Ġestab lished", - "Ġ201 5", - "Ġinsp ired", - "Ġannoun ced", - "ä¸ ª", - "ĠÑĤ ÑĢ", - "Ġ2 6", - "Ġv oy", - "Ġte ch", - "ìł ģ", - "Ġprocess es", - "ont o", - "ĠP an", - "Ġrap id", - "ist an", - "Ġ19 7", - "Ġrelig ion", - "Ġ2 8", - "Ġsm ile", - "Ġb ab", - "Ġ Ú©", - "ĠV ir", - "Ġsched ule", - "Ġexec ut", - "Ġpr on", - "Ñ į", - "ĠÐĿ Ñĥ", - "m usic", - "ìĽ IJ", - "Ġg an", - "ìĭ ł", - "Ġdef ault", - "Ġbe m", - "Ù ī", - "Ġfor ced", - "ĠOb viously", - "Ġst one", - "Ġt ie", - "Ġdrink ing", - "Ġser ved", - "C ause", - "Ġcon ference", - "ĠExact ly", - "ãĥ Ī", - "ł ľ", - "ìĻ Ģ", - "ĠR a", - "Ġf ake", - "Ġdif f", - "ãģ ©", - "Ġchalleng ing", - "Ġì¤ ij", - "Ï ĩ", - "ä»Ģ 麼", - "Ġintellig ence", - "re te", - "Ġstud ying", - "Ġapp oint", - "Ġt an", - "Ġи м", - "Ġcur ve", - "ĠTe am", - "ĠA z", - "Ġз д", - "ĠMus ic", - "f ield", - "ir ation", - "Ġfail ed", - "Ġno vel", - "Ġdifferent ly", - "Ġes cape", - "ĠY o", - "ĠOct ober", - "ı yor", - "Ġdescri bed", - "Ġcon vert", - "ac ement", - "Ġhot el", - "is ation", - "Ġsu is", - "ãģ ij", - "å ŃIJ", - "æĢ İ", - "Ġwalk ed", - "2 00", - "Ġneighbor hood", - "is p", - "ĠL os", - "Ġh idden", - "Ġ2 7", - "л е", - "Ġph r", - "ĠIs land", - "ĠSt reet", - "end a", - "hip s", - "os ure", - "Ġdefin ed", - "ภ§", - "Ġv ida", - "Ġlab el", - "ĠEvery body", - "Ġjo ke", - "ia o", - "ا ÙĨ", - "Ġa thlet", - "... 
\"", - "ĠF ire", - "D o", - "Ġdef ense", - "Ġent ertain", - "á t", - "Ġpolic ies", - "Ġal cohol", - "ĠEng ine", - "Ġg al", - "ĠJ ud", - "Ġvol unte", - "ick s", - "et a", - "ag t", - "Ġ× ķ", - "Ġm ö", - "1 3", - "Ġenc oun", - "Ġe h", - "Ġor ange", - "Ġabs or", - "Ġsp aces", - "ĠNove mber", - "êµ ¬", - "i at", - "Ġt am", - "ck now", - "Ġst orm", - "ĠDire ctor", - "Ġpre gn", - "ĠìĿ ¼", - "Ġо п", - "Ġres ource", - "Ġb ard", - "ne w", - "ĠDe cember", - "u its", - "Ġwe il", - "Ġconst ruct", - "s i", - "n ic", - "Ġfl our", - "Ġrest rict", - "ü t", - "Ġentire ly", - "Ġbreak ing", - "ent lich", - "Ġtw enty", - "Ġcaus es", - "Ġele v", - "ĠS pr", - "ĠIntern et", - "Ġk iss", - "Ġoper ations", - "s zy", - "Ġë Ĭ", - "Ġscient ists", - "Ġgr own", - "Ġown ers", - "out s", - "Ġcour ses", - "Ġus ual", - "Ġin n", - "Ġtrans m", - "ñ o", - "Ġnu est", - "к ов", - "Ġcateg ory", - "ĠL ife", - "ĠPl us", - "Ġat mos", - "wh ile", - "Ġrecord s", - "Ġde ÄŁ", - "ëĭ¤ ê³ł", - "ĠìĤ¬ë ŀ", - "Ġrequire ments", - "in n", - "Ġimm ig", - "Ġdeep er", - "ç ´", - "Ġapp s", - "Ġcolle agues", - "ż y", - "Ġoff ers", - "Ġt á", - "Ġcolum n", - "la ud", - "I R", - "ĠM s", - "Ġexch ange", - "l as", - "ĠL aw", - "ĠJ on", - "is se", - "ro gen", - "Ġmo i", - "× Ĺ", - "Ġs ending", - "Ġhe llo", - "е е", - "ÅĽ Äĩ", - "Ġsuc ceed", - "Ġsuff ering", - "Ġad vert", - "Ġì£ ¼", - "çŁ¥ éģĵ", - "Ġrec o", - "ın ı", - "Ġк ом", - "all ey", - "Ġfail ure", - "ie j", - "Ġëķ Į", - "Ġdrug s", - "Ġcu ando", - "Ġìĸ´ë ĸ", - "ĠAb out", - "Ġqu ando", - "9 0", - "ĠF ed", - "1 7", - "S h", - "in ho", - "ĠSund ay", - "ĠPh il", - "Ġacad emic", - "ĠIn c", - "Ġmaint en", - "åĩ º", - "Ġre ward", - "er d", - "Ġcomm itted", - "ìĬ ¤", - "г ÑĢ", - "Ġstand ards", - "Ġk al", - "Ġint ention", - "ĠZ h", - "Ġa cknow", - "ä ¿", - "Ġ== =", - "og y", - "å §", - "Ġfilm s", - "is k", - "Ġte eth", - "Ġstrugg le", - "r d", - "u en", - "Ġdis s", - "ĠD ar", - "am y", - "Ġenem ies", - "Ġve loc", - "ĠC all", - "um bs", - "иÑĤ елÑĮ", - "Ġo cean", - "é d", - "ìļ °", - "Ġtre m", - "ient o", - "еÑĪ ÑĮ", - "ffic ient", - "Ġbott le", - "Ġinstit ution", - "est y", - "ĠH an", - "h ab", - "ëĬ ĺ", - "Ġar rest", - "éĤ Ħ", - "Ġlet ters", - "oun ce", - "í Į", - "A n", - "Ġcreat es", - "Ġcl ock", - "Ġdeb t", - "Ġan cient", - "ific ations", - "g i", - "B ut", - "ĠT u", - "k l", - "Ġb order", - "Ġo ok", - "ĠB ay", - "est a", - "Ġë³ ´ì", - "Ġw ra", - "pre ne", - "Ġê² Į", - "ang le", - "Ġbelie ved", - "ien cy", - "ak a", - "Ġcrit ic", - "Ġb omb", - "Ġha m", - "ĠÐ Ľ", - "êµ Ń", - "ĠGu ys", - "ros oft", - "Ġcr im", - "et ch", - "AR R", - "Ġs ight", - "и на", - "Ġa in", - "á» ij", - "is che", - "Ġau x", - "Ġnum er", - "Ġsurv ive", - "A ll", - "B C", - "Ġs z", - "Ł ¬ë", - "Ġj am", - "ĠCour t", - "Ġall es", - "Ġtr igger", - "Ð ŀ", - "Ġform at", - "Ġdec ades", - "Ġc es", - "Ġsign s", - "Ġrob ot", - "ĠCh urch", - "Ġa z", - "Ġs oup", - "ĠTex as", - "ut en", - "ĠÑĩ ÑĤобÑĭ", - "Ġneigh b", - "ĸ ×Ķ", - "Ġcommunic ate", - "Å ¡", - "Ġel imin", - "Ġfrequ ency", - "her n", - "id os", - "Ġem phas", - "Ġmess ages", - "Ġg ender", - "ĠW enn", - "Ġв о", - "Ġpr ices", - "ol o", - "Ġп он", - "w ing", - "ĠF il", - "а ем", - "ĠC ur", - "Ġfal se", - "Ġfield s", - "Ġs é", - "2 4", - "Ġm ac", - "u ÅŁ", - "Ġlay ers", - "Ġadv oc", - "w an", - "Ġk ar", - "ĠÅ ŀ", - "Ġdec or", - "Ġwall s", - "o e", - "iss ions", - "Ġres ol", - "× ¢", - "ĠCar ol", - "ĠV ide", - "le ep", - "ĠY OU", - "Ġfl ip", - "Ġsur gery", - "Ġch op", - "U R", - ". 
,", - "Ġag ency", - "Ġwant ing", - "Ġsol ar", - "Ġhor iz", - "ĠAd am", - "Ġstay ing", - "ol ic", - "Ġgr ateful", - "Ġrem ark", - "Ġtechn ologies", - "Ġprote in", - "å¿ ĥ", - "д ел", - "ĠM ont", - "Ġshould er", - "Ġz a", - "re y", - "ĠO oh", - "Ġst y", - "ic ar", - "оÑĤ ÑĢ", - "Ġrout e", - "ĠT urn", - "Ġb om", - "Ġdeb ate", - "Ġposs ibility", - "Ġíķ ´ì", - "ap a", - "Ġinv ent", - "ür lich", - "Ġprof ile", - "Ġsen ior", - "pp y", - "v as", - "Ġm undo", - "ate ver", - "Ġapp arently", - "en er", - "× IJ", - "ç Ń", - "Ġprec is", - "Ġal ign", - "Ġkn ife", - "ĠRo bert", - "å ĭ", - "Ġfo ol", - "Ġinv ite", - "us ing", - "Ġcircum st", - "Ġcapt ure", - "Ġd ough", - "ĠS and", - "Ġse u", - "ĠNew s", - "Ġb ite", - "Ġne ut", - "w ide", - "Ġlect ure", - "Ġëĺ IJ", - "Ġorigin ally", - "Ġcho ices", - "ĠG ar", - "Ġver se", - "Ġl it", - "Ġ19 6", - "íķ ł", - "Ġmeas ures", - "ç ões", - "w ater", - "ri ve", - "Ġz ijn", - "í ģ", - "ĠB us", - "Ġhe b", - "е Ñħ", - "ĠK ar", - "ĠN ão", - "Ġkill ing", - "à® ª", - "Ġmir ror", - "m od", - "Ġm ol", - "Ġcre ation", - "Ġest im", - "Ġatmos phere", - "Ġg am", - "Ġt ables", - "is i", - "ĠL ittle", - "Ġt as", - "ĠE le", - "é l", - "Ġscen es", - "Ġt one", - "Ġaffect ed", - "ĠAU DI", - "ĠBr own", - "I f", - "ĠÙ ĩ", - "ĠDan iel", - "羣 çļĦ", - "qu er", - "ch i", - "íķ ĺë", - "Ġmist akes", - "Ġs la", - "ãĤ ¤", - "Ġent r", - "Ġе Ñģли", - "Ġsh out", - "Ġport ion", - "Ñ Ĺ", - "Ġpre viously", - "á» Ļ", - "ĠпÑĢ ед", - "оÑģ ÑĮ", - "Ġhead s", - "ç İ", - "å Ń", - "åľ ĭ", - "Ġgr ass", - "ภ°", - "cri be", - "Ġqu é", - "ĠSp anish", - "Ġoffer ed", - "ĠбÑĭ ло", - "ĠCl oud", - "Ġve ctor", - "ĠH uh", - "Ġk ad", - "if ts", - "ĠÎ ½", - "Ġhung ry", - "Ð ¡", - "Ġpar all", - "AN D", - "ĠvÃŃde o", - "iz z", - "Ġocc up", - "Ġí Ķ", - "Ġsee k", - "h es", - "Ġdo ors", - "Ġhous es", - "Ġconsider ing", - "Ġgradu ate", - "Ġf ulf", - "è ¡Į", - "è £", - "Ġext reme", - "Ġflow ers", - "it ate", - "ĠP ri", - "Ġfundament al", - "Ñĩ аÑģ", - "è¯ ´", - "Ġtext ure", - "į ĺ", - "ĠAN D", - "à® ±", - "ĠT em", - "Ġn ada", - "ì§ Ħ", - "Ġcelebr ate", - "um s", - "Ġp ill", - "Ġи ли", - "go ing", - "Ġh ip", - "Ġsupport ed", - "Ġper man", - "Ġagre ement", - "Ġty m", - "Ġë ij", - "ĵ¤ ìĿ´", - "Ġpurch ase", - "í Ķ", - "ĠPl an", - "eg en", - "Ġrec over", - "P U", - "ĠMic rosoft", - "du c", - "Ġhol es", - "Ġdro pped", - "Ġp ig", - "Ġend ing", - "Ġattack s", - "be c", - "Ġre n", - "Ġr app", - "Ġìļ °ë¦¬", - "Ġter ror", - "Ġ× Ļ", - "Ġed it", - "Ġa o", - ". 
", - "Ġhero es", - "ĠB oston", - "Ġdepend ent", - "Ġmotiv ation", - "fl ix", - "Ġse am", - "ки е", - "Ġdra in", - "od ed", - "Ġgu ilty", - "ĠJ enn", - "ing en", - "Ġgrant ed", - "ĠK elly", - "ĠS av", - "ĠUn cle", - "ĠHon estly", - "EL I", - "Ġnavig ate", - "Ġbless ed", - "c ore", - "Ġear ning", - "Ġsign als", - "Ġdis k", - "ial s", - "Ġag es", - "æ ħ", - "Ġpartic le", - "ĠÑĩ еÑĢ", - "Ġcan n", - "Ġt ier", - "Ġstat ements", - "ê³ł ìļĶ", - "ĠëķĮ문 ìĹIJ", - "ĠCh o", - "Ġpol ar", - "an ç", - "ĠK enn", - "ĠN i", - "ĠF ight", - "or gan", - "é ķ", - "ĠCh a", - "ĠS ÃŃ", - "ãĥ ª", - "Ġs lic", - "Ġcert ific", - "Ġtempl ate", - "ĠFed eral", - "Ġconsider ation", - "Ġexpl o", - "ĠM ain", - "ĠN E", - "Ġalong side", - "Ġd ressed", - "ĠP oint", - "Ġenviron ments", - "Ġpró xim", - "Ġda ar", - "Ġprom pt", - "Ġpurs ue", - "Ġentertain ment", - "Ġth roat", - "Ġproblem a", - "Ġm art", - "ì ¼", - "Ġprov ider", - "Ø Į", - "Ġ× Ĺ", - "int e", - "m aking", - "Ġstro ke", - "Ġtiss ue", - "U n", - "Ġpre cious", - "ĠAr ts", - "ink ing", - "ĠÐŀ н", - "Ġи Ñģ", - "n ah", - "ĠÐķ Ñģли", - "Ġcor ners", - "Ġtrick y", - "in ch", - "l ijk", - "Ġpress ing", - "le vel", - "AN G", - "Ġrad iation", - "ìĦ ł", - "Ġconf ront", - "Ġv et", - "Ġrepresent ative", - "Ġprop ag", - "Ġcra p", - "ĠDe c", - "Ġr amp", - "еп еÑĢÑĮ", - "u és", - "ess en", - "cri ption", - "Ġb ills", - "ĠMatth ew", - "Ġan ime", - "ấ t", - "Ġlow est", - "h as", - "sc reen", - "og rap", - "ал о", - "int on", - "ĠJ ah", - "èĢ ħ", - "it Ãł", - "Ġk ay", - "Ġrot ation", - "ĠW ere", - "abe i", - "Ġtri als", - "Ġle ver", - "ight y", - "Ġsp oon", - "Ġh unt", - "c ling", - "Ġdis m", - "ĠболÑĮ ÑĪ", - "Ġass ault", - "Ġíĺ ķ", - "Ġweek ly", - "Ġm ismo", - "Ġgen etic", - "ul pt", - "ĠStud ent", - "Ġreal istic", - "Ġauthent ic", - "æī ĵ", - "ast a", - "Ġarrest ed", - "Ġguid elines", - "Ġ×ľ× IJ", - "Ġд ав", - "ĠCom ing", - "f ür", - "Ġrequ ests", - "ĥ IJ", - "Ġanaly ze", - "Ġinter ess", - "Ġh alt", - "ĠO per", - "on om", - "Ġd uck", - "Ġwith d", - "s er", - "ĠÏ Į", - "ĠHist ory", - "Ġyout ube", - "ãĤ į", - "Ġsab er", - "w alk", - "f ont", - "Ġover view", - "3 9", - "ü y", - "ett i", - "Ġfro zen", - "Ġf lesh", - "ÄŁ i", - "ĠP M", - "ĠìĻ Ģ", - "é ¢", - "ÑĨи и", - "Ġê¸ °ë", - "íģ ¬", - "Ġpr ose", - "oo oo", - "r ates", - "W S", - "Ġautom atic", - "Ġcollect ing", - "Å ij", - "Ġneighb ors", - "» .", - "ĠEx pl", - "Ġcir cul", - "co ver", - "we g", - "Ġstick s", - "Ġe ller", - "Ġw ww", - "Ġd orm", - "ĠEx per", - "Ġstat istics", - "Ġemail s", - "Ġgra ve", - "im iz", - "H S", - "Ġu it", - ", '", - "Ġlas er", - "è ī", - "ĠÑĤ ем", - "Ñĭ ÑĪ", - "Ñī Ñij", - "Ġgen au", - "Ġtien en", - "Ġmed itation", - "ĠOr gan", - "Ġest imate", - "Ġë¬ ´ì", - "l ets", - "Ġn Ãły", - "Ġmind set", - "Ġres on", - "Ġm és", - "Ġnumer ous", - "Ġvie lleicht", - "ĠTh ird", - "u ous", - "ĠDe ad", - "ан д", - "H N", - "Ġrac ing", - "Ġag ents", - "ĠU t", - "Ġte ar", - "ĠH P", - "Ġchem istry", - "Ġsurv ival", - "æĸ °", - "Ġconvin ced", - "Ġ ;", - "Ġreg ulations", - "ĠE S", - "åĴ Į", - "3 00", - "Ġen se", - "Ġì µ", - "Ġd ict", - "G A", - "Ġah ÃŃ", - "åĭ ķ", - "Ġte j", - "Ġо ÑģÑĤ", - "ĠE lect", - "Ġintellect ual", - "Ġbi as", - "Ġbur den", - "çĤ ¹", - "Ġìĸ´ëĸ »", - "Ġche er", - "Ġso ph", - "Ġportfol io", - "ub a", - "Ġest os", - "T V", - "F or", - "Ġas h", - "Ġkom mer", - "Ġcollect ive", - "Ġw rest", - "ĠJ etzt", - "ĠW at", - "re ich", - "Ġprim er", - "act ive", - "Ġm ie", - "ick ed", - "Ġhun ting", - "Ġtest im", - "Ġcompass ion", - "ĠØ ±", - "Ġbr ut", - "Ġsal ad", - "об Ñīе", - "Ġsol ving", - "Ġflo ating", - "ç ·", - "Ġattract 
ive", - "ÙĪ ÙĦ", - "Ġper d", - "if fer", - "Ġsc ulpt", - "hh h", - "ĠWe ek", - "Ġent hus", - "Ġn ad", - "Ġmer ch", - "ĠíĻ ķ", - "Ġm ile", - "好 äºĨ", - "ĠÎ ¸", - "ĠëĤ ĺë", - "éĩ į", - "3 8", - "Ġch ains", - "ĠAl most", - "Ġtick ets", - "r in", - "ĠC C", - "Ġdistrib uted", - "abet es", - "Ġtemper atures", - "Ġg ained", - "Ġflex ibility", - "Ġscream ing", - "Ġab road", - "un o", - "Ġentreprene urs", - "ĠNet work", - "ĠCanad ian", - "Ġpre v", - "Ġs ö", - "ĠÑĤеб Ñı", - "ĠP oke", - "ĠP od", - "ĠTur key", - "çı¾ åľ¨", - "Ġabst ract", - "Ġsn ake", - "ĠAm y", - "ĠëĬIJëĤ Į", - "Ġbra ve", - "ĠìŀĪ ìĸ´ìļĶ", - "ĠK al", - "Ġ200 7", - "á rio", - "Ġmark ed", - "gin es", - "Ġall oc", - "ON G", - "Ġscient ist", - "Ġes ca", - "Ġrac ism", - "× ij×", - "ĠS ams", - "ĠP enn", - "Ġload s", - "Ġà® ¨", - "ü ber", - "M e", - "ix ò", - "Ġper ò", - "an ne", - "Ġexp ressed", - "м еÑĢ", - "Ġmo et", - "Ġret urning", - "n ia", - "Ġexp on", - "P ro", - "Ġlo yal", - "M L", - "Ġl amp", - "Ġsh y", - "Ġcomp osition", - "ĠL y", - "Ġmagn etic", - "Ġprem ier", - "Ġmeasure d", - "Ġsumm ary", - "Ġattack ed", - "Ġfin ishing", - "Ð Ĺ", - "ç ¥", - "Ġs its", - "Ġhyd rogen", - "Ġma i", - "ĠDeuts ch", - "as ı", - "Ġobt ain", - "v ie", - "Ġso it", - "Ġë° Ķ", - "Ġl ane", - "Ġconse gu", - "в о", - "Ġe ase", - "ak in", - "ĠF a", - "Ġunt uk", - "Ġbur st", - "Ġc um", - "al ım", - "ú blic", - "id i", - "ĠRoy al", - "ĠK on", - "Ġcommon ly", - "Ġremo ving", - "Ġj ur", - "il ib", - "Ġan ch", - "íĸ ī", - "Æ°á» £", - "ĠÐľ Ñĭ", - "ĠAn th", - "ĠS Ã¥", - "Ġinter rupt", - "Ġst ere", - "ĠO S", - "ony m", - "ter y", - "ĠMar ia", - "ê² ĥ", - "Ġexpl oring", - "Ġtransp arent", - "Ġf ate", - "ĠJ ung", - "Ġgr up", - "Ġdark er", - "ĠD oug", - "Ġman e", - "æĶ ¾", - "ạ i", - "d ri", - "lo ok", - "ĠDes ign", - "Ġtut aj", - "Ġhorizont al", - "re on", - "ort e", - "ĠCor rect", - "ĠSte ven", - "Ġv ine", - "0 2", - "i Äĩ", - "Ġsie mpre", - "ĠK ey", - "åĥ ı", - "ĠG ames", - "Ġna ar", - "Ġshock ed", - "el ve", - "ĠR ose", - "ìĭ ¬", - "Ġstop ping", - "oh l", - "ĠM ix", - "Ġsuff ered", - "Ġsig ma", - "Ġweak ness", - "ĠO w", - "ี à¹Ī", - "I F", - "Ġà® ħ", - "ad ed", - "ĠNet flix", - "an es", - "Ġrem ained", - "ir y", - "Ġr ip", - "ell t", - "Ġsil ent", - "Ġpro ven", - "Ġtox ic", - "Ġal umin", - "Ġmulti pl", - "al and", - "Ġ3 4", - "0 6", - "ĠB ru", - "Ġìłķ ë§IJ", - "J ust", - "b oy", - "Ġsho e", - "Ġcreat ure", - "Ġhead ed", - "ĠоÑĤ к", - "æ ±", - "Ġess ence", - "Ġremark able", - "Ġnú mer", - "Ġd rew", - "Ġpu zzle", - "ĠLibr ary", - "ĠF u", - "ash es", - "k k", - "ĠI st", - "¦ °", - "ĠB ry", - "Ġc eremony", - "Ġà® İ", - "Ġc ri", - "e qu", - "ãĤ ¢", - "Ġpri ze", - "Ġdim ensions", - "og ram", - "Ġle ather", - "Ġpop ulations", - "u um", - "Ġve gan", - "Ñı д", - "Ġcó mo", - "å Ħ", - "Ġstri p", - "å £", - "Ġvac ation", - "ħ ķ", - "Ġme als", - "ili pp", - "Ġ ents", - "ar am", - "ric ht", - "Ġgra in", - "ĠSp ain", - "Ġche ek", - "ĠA ff", - "I ON", - "ĠBr ing", - "Ġ3 8", - "iel en", - "ul u", - "ĠболÑĮ ÑĪе", - "Ġannounce ment", - "ĠÑĤ ÑĥÑĤ", - "ĠPro phet", - "ard o", - "3 7", - "Ġw oke", - "Ġtransl ation", - "ĠN OT", - "ĠC L", - "Ġd Ã¼ÅŁ", - "ÑĨ Ñĸ", - "ac er", - "ĠL oc", - "Ġper ception", - "N O", - "Ġdies en", - "L ook", - "he art", - "av ed", - "Ġbound ary", - "Ġfl ows", - "Ñij м", - "Ġarg uments", - "Ġelect ions", - "ı s", - "Ġhe ck", - "Ġsuit able", - "Ġf iber", - "ĠSt ra", - "x y", - "ĠH um", - "Ġmonth ly", - "u per", - "Ġgol f", - "Ġl ately", - "ĠG ard", - "ĠR en", - "ĠA st", - "ĠF ant", - "аÑģ Ñģ", - "Ġobs er", - "ë ¡ľ", - "Ġeas iest", - "į Ķë", - "Ġwebs ites", - "p ol", - 
"Ġco con", - "Ġà® ĩ", - "ĠV eg", - "Ġwalk s", - "Ġint ro", - "Ġdirect ed", - "ĠAn na", - "Ġëĵ¤ ìĸ´", - "ĠEaster n", - "ĠS aint", - "ĠB ow", - "Ġro ast", - "ĠU RL", - "Ġjed en", - "ur as", - "aj a", - "Ġse mi", - "Ġrapid ly", - "Ġtarget s", - "ĠCont rol", - "Ġb ah", - "Ġref lection", - "Ġcreat ivity", - "hold ers", - "Ġìĺ ¬ë", - "Ġamong st", - "Ġfeed ing", - "ÑįÑĤ омÑĥ", - "Ġвид е", - "Ġë§Įë ĵ¤", - "ĠSm art", - "Ġrel iable", - "Ġvez es", - "Ġ× ¨", - "ch uckles", - "az ione", - "ĠWilliam s", - "Ġa ç", - "Ġsle e", - "е Ñī", - "Ġtim eline", - "Ġthor ough", - "á» į", - "ĠO t", - "ạ n", - "Ġimag ination", - "Ġmechan ics", - "r ist", - "Ġclaim ed", - "ÏĦ η", - "ê te", - "ĠHur ry", - "ĠiP ad", - "Ġconst ru", - "ĠC la", - "ĠAl s", - "ä¼ ļ", - "ut z", - "Ġcult ures", - "Ġìĸ´ëĸ» ê²Į", - "Ġbelong s", - "Ġy er", - "ĠDoes n", - "Ġge omet", - "Ġb id", - "Ġfo am", - "Ġh ob", - "ĠBrit ain", - "Ġsubst ance", - "Ġann iversary", - "ĠëĦ Ī", - "Ġnot ed", - "Ġgovern or", - "Ġstock s", - "3 1", - "Ġdi ye", - "ìĬ ¤ë", - "Ġre b", - "z el", - "Ġmultip ly", - "Ġoper ator", - "Ħ¤ ìļĶ", - "Ġwat ers", - "Ġd är", - "Ġuns er", - "ĠEliz abeth", - "é« ĺ", - "Ġincreasing ly", - "ĠG ro", - "Ġen gines", - "ir s", - "Ø «", - "Ġtre asure", - "P C", - "in ction", - "ir i", - "Ġacc um", - "Ġvari ation", - "Ġp om", - "Ġtit les", - "ĠF est", - "ó s", - "Ġeld er", - "ny m", - "r un", - "Ñı в", - "Ġinnov ative", - "Ġnom bre", - "Ġco inc", - "Ġfr anch", - "Ġent onces", - "Ġnicht s", - "Ġexc lusive", - "ĠChe ers", - "ĠB i", - "u je", - "æŃ ¡", - "Ġp ok", - "ĠP rem", - "Ġrock et", - "ELI PE", - "Ġhosp itals", - "ri um", - "Ġjust e", - "Ġham mer", - "Ġquant um", - "Ġrespons es", - "ll y", - "end i", - "Ġact ively", - "Ġfr idge", - "i ate", - "l ong", - "Ġqu em", - "Ġdeath s", - "Ġsuper ior", - "ck en", - "ìĿ´ì ĹIJ", - "kt op", - "Ġgather ed", - "£ ¨", - "Ġd azu", - "Ġreci pes", - "Ġbu zz", - "c en", - "Ġany time", - "ons ense", - "Ġcirc les", - "Ġsol ved", - "Ġìĭ ł", - "Ġcoron avirus", - "ĠLu ke", - "Ġbu bb", - "Ġcont empor", - "r zy", - "ĠJ ane", - "Ġд ом", - "Ġscrew s", - "Ġhy brid", - "Ġcas ual", - "Ġsel bst", - "be ing", - "ĠÄ IJ", - "ĠCol umb", - "ĠÑħ оÑĩ", - "Ġbu cket", - "Ġevalu ate", - "Ġid ol", - "Ġrep utation", - "ĠìĨ Įë", - "ÙĪ ر", - "Ġhe cho", - "Ġpo em", - "Ġsubject s", - "pl ant", - "ĠBe h", - "ĠSpe aking", - "Ġbatter ies", - "Ġfollow ers", - "ö l", - "Ġg ently", - "Ġsi xt", - "Ġparam eter", - "Ġik ke", - "ĠT our", - "ĠD J", - "ot te", - "ĠJ ahren", - "Ġprepar ation", - "Ġд Ñĥм", - "Ġ8 00", - "c op", - "ik ing", - "Ġë¬ ¸", - "Ġн Ñĥ", - "Ġл еÑĤ", - "åIJ Į", - "ĠI de", - "Ġì¡° ê¸Ī", - "Ġla ughter", - "Ġmole cules", - "ĠR est", - "Ġobs erved", - "d zie", - "Ġadvert ising", - "ert o", - "Ġmo ins", - "ĠM IT", - "Ġexc it", - "Ġt um", - "Ġty l", - "Ġinvest ed", - "Ġph arm", - "Ġunex pected", - "Ġph i", - "oty pe", - "we ise", - "Ġge ç", - "jour d", - "Ġhors es", - "n Äħ", - "= \"", - "ĠS M", - "Ġf ib", - "Ġcl ips", - "çķ ¶", - "å¦Ĥ æŀľ", - "Ġreg ime", - "Ġrot ate", - "r ou", - "n ik", - "Ġarm or", - "ðŁ ĺ", - "еÑĢ а", - "åº ¦", - "ĠO ch", - "Ġr ichtig", - "üz el", - "ane ously", - "m ek", - "éĮ ¯", - "ĠX iao", - "Ġexist ed", - "w orth", - "ãģ£ ãģ¨", - "Ġna ught", - "Ġhe iÃŁt", - "ĠB al", - "Ġres id", - "iv ot", - "om atic", - "Ġh ired", - "Ġgrad ually", - "Ġon ions", - "Ġcomp at", - "Ġint im", - "Ġj ew", - "Ġcontrib ution", - "ĠI re", - "ac ji", - "Ġsl ice", - "Ġimm un", - "ĠR us", - "Ġgr ows", - "ĠSimilar ly", - "Ġhard est", - "Ġst ruck", - "Ġmeasure ment", - "... 
]", - "th ey", - "Ġìł Ģë", - "Ġsne ak", - "Ġappl ies", - "Ġн ем", - "æ ĵ", - "×ij ר", - "ĠЧ ÑĤо", - "Ġout ro", - "Ġinnoc ent", - "Ġm og", - "ĠSams ung", - "Ġmer cy", - "Ġhand ling", - "Ġinter vention", - "id ays", - "g ot", - "Ġcur ric", - "Ġbound aries", - "Ġconf using", - "Ŀ¼ ëĬĶ", - "æ ĩ", - "Ġstitch es", - "ÃŃ vel", - "Ġtun nel", - "it ä", - "Ġg ost", - "im y", - "Ġcz as", - "Ġm é", - "Ġcat al", - "ĠSim on", - "ĠLI AM", - "m ic", - "ĠÐ ¤", - "Ġey el", - "is as", - "ĠC PU", - "ĠD ou", - "Ġnä ch", - "Ġinfin ity", - "Ġr if", - "ĠPe ace", - "ĠC u", - "Ġminim al", - "Ġlisten ed", - "Ġpo le", - "hal b", - "Ġload ed", - "Ġste ady", - "ĠBes ides", - "ê m", - "Ġl ap", - "Ġco op", - "Ġfriends hip", - "w orld", - "Ġge h", - "Ġtyl ko", - "ĠLa ura", - "Ġsurround ed", - "ĠE vent", - "Ġch ap", - "ĠW onder", - "bre ak", - "Ġdro ve", - "Ġbroad er", - "Ġch i", - "F i", - "Ġge hen", - "Ġwest ern", - "Ġintellig ent", - "Ġpers ist", - "Ġfound ed", - "ãģĵ ãģ¨", - "Ġhistor ic", - "Ġfr Ã¥", - "cks Ã¥", - "Ġhand y", - "Ġsy mp", - "Ġr ows", - "Ġnut ri", - "b ur", - "ĠLe on", - "Ġsist ema", - "Ġext ensive", - "ĠÑĥ в", - "í ı", - "Ġnight s", - "Ġcá c", - "Ġcount ing", - "ĠM ust", - "all ow", - "еÑģ Ñģ", - "M om", - "Ġнад о", - "Ġbar rel", - "ãĥ ŀ", - "AR D", - "Ġinstall ation", - "Ġin sect", - "Ġëħ ¸ë", - "uj Äħ", - "ĠÄij i", - "Ġpack ed", - "Ġf iction", - "N ow", - "ĠY ay", - "Ġper t", - "r ons", - "und e", - "ach es", - "Ġsty les", - "Ġapr ès", - "ok u", - "ĠV ice", - "ın ız", - "com m", - "Ġassign ed", - "Ġinteract ions", - "Ġac ab", - "F ELIPE", - "Ġresc ue", - "Ġindust ries", - "ĠAnd y", - "Ġpra ise", - "Ġfl ame", - "Ġsn ack", - "í Ĥ", - "ç ģ", - "Ġsw o", - "rend er", - "Ġbo ards", - "ĠÑĤ ом", - "en ne", - "Ġpast a", - "Ġdev il", - "ĠF el", - "Ġhat te", - "Ġcoll eg", - "e h", - "ì »", - "ãģĵ ãģ®", - "Ġproduct ive", - "for ward", - "и п", - "Ġsmart phone", - "Ġinv is", - "Ġb um", - "Ġwho a", - "ìŀ Ħ", - "Ġo cksÃ¥", - "ĠL ang", - "ĠSy ria", - "Ġses i", - "ί α", - "Ġappro val", - "4 8", - "Ġод ин", - "Ġë ĸ", - "ĠH arr", - "ĠAd minist", - "Ġ× ¤", - "ĠDe an", - "f i", - "Ġcitiz en", - "Ġsh ark", - "0 5", - "Ġbo il", - "Ġindic ate", - "å ¡", - "A re", - "Ġlay out", - "Ġref r", - "ĠPac ific", - "AA AA", - "ĠAustral ian", - "g ression", - "V oice", - "ал ÑģÑı", - "Ġshel ter", - "T o", - "au pt", - "Ġevalu ation", - "ap or", - "Ġcur rency", - "Ġм ного", - "ig os", - "ãģ °", - "Ġo ct", - "Ġro yal", - "è ³", - "as il", - "ĠChild ren", - "Ġr ien", - "Ġë ĵľë", - "Ġbar rier", - "Ġej emplo", - "Ġe k", - "N D", - "es p", - "ен а", - "Ġp ic", - "Ġkill er", - "Ġintegr ate", - "Ġfew er", - "Ġdis abilities", - "Ġ ....", - "Ġtri angle", - "Ġfe es", - "Ġwid ely", - "em i", - "Ġoverwhel ming", - "Ġz omb", - "Ġb ere", - "Ġho od", - "ĠA ye", - "ĠHar vard", - "e v", - "ĠÏĦ οÏħ", - "Ġcup s", - "ĠA uch", - "z ona", - "Ġ199 0", - "Ġwe iÃŁ", - "Ġcr unch", - "æ ¥", - "Ġз ав", - "Ġmeas uring", - "Ġst ations", - "ĠStep hen", - "Ġshort ly", - "Ġsig ning", - "Ġcom edy", - "om o", - "Ġsuggest ions", - "Ġsign ature", - "ĠпÑĢ ив", - "Ġdis order", - "as ka", - "Ġworld s", - "Ġprecis ely", - "n orm", - "ra v", - "ĠC ivil", - "In ter", - "ĠC ertain", - "Ġinj ured", - "Ġsuggest s", - "ĠGold en", - "Ġcy ber", - "ĠØ ´", - "Ġtempor ary", - "Ġco oper", - "Ġvot ed", - "Ġ ought", - "ấ y", - "x ual", - "Ġpan els", - "Ġ9 5", - "Ġhands ome", - "ĠпÑĢ ов", - "Ġper mit", - "Ġke in", - "Ġbad ly", - "Ġnot ifications", - "iz a", - "ĠNot ice", - "Ġinc lusive", - "Ġanswer ing", - "Ġí Ĺ", - "u ld", - "íħ Į", - "Ġnow adays", - "Ġ3 7", - "Ġb olt", - "Ġstat ic", - "ĠH 
op", - "Ġav ant", - "aj o", - "Ġ맼 ìŀĪ", - "Ġfif ty", - "ĠF inal", - "Ġsc ores", - "ĠT ap", - "Ġcy l", - "Ġconv ince", - "Ġany ways", - "od a", - "Ġìķ ¼", - "Ġser ves", - "ĠÑĤак ой", - "ĠZo om", - "Ġsaving s", - "ul o", - "Ġs outhern", - "view er", - "Ġho je", - "Ġse ja", - "Ġrepresent ing", - "Īë įĺ", - "l ik", - "ĠSome body", - "Ġbe ast", - "Ġstick ing", - "Ġins ist", - "Ġtal ented", - "Ġexplain ing", - "Ġatt orney", - "éĥ ¨", - "Ġst airs", - "ĠD og", - "í ĭ", - "Ġc ig", - "Ġshap ed", - "Ġs ons", - "Ïģ ι", - "ut t", - "Ġì Ķ", - "Ġpar ad", - "ìĿ¸ë į°", - "Ġh orn", - "ĠJ our", - "ann o", - "Ġworld wide", - "åĬ Ľ", - "Ġparticip ation", - "¦ Ħ", - "Ġm ów", - "Ġburn ed", - "Ġwrit ers", - "all ah", - "ĠF und", - "Ġcle ver", - "ĠLe ute", - "b in", - "Ġbe ating", - "f oot", - "ĠìĽ IJ", - "ĠStud io", - "Ġv ag", - "be y", - "r ze", - "Ġoppos ition", - "Ġж из", - "w ho", - "Ġê± ´", - "Ġtr ace", - "Ġд енÑĮ", - "Ġep id", - "Ġges ch", - "ĠN ar", - "ĠB E", - "Ñĥ й", - "ĠS ign", - "ed ly", - "Ġcl ay", - "Ġinst antly", - "Ġgather ing", - "ĠGal axy", - "Ġb ored", - "ĠBudd h", - "c é", - "Ġm am", - "Ġsl ope", - "Ġëĭ¤ ìĿĮ", - "Ġsch ön", - "Ġp ir", - "ge f", - "am er", - "Ġh ö", - "Ġcolle ague", - "Ġpres ents", - "ad ium", - "Ġà® µ", - "Ġfal ar", - "be ep", - "Ġdri ed", - "ism s", - "Ġro pe", - "Ġworks hop", - "Ġest ud", - "Ġb ands", - "Ġthem es", - "åħ ¬", - "ÙĬ ر", - "åIJ İ", - "Ġremind er", - "ÑĤ Ñĥ", - "ĠB h", - "Ġcocon ut", - "ĠÑģ ÑĤо", - "ĠCh annel", - "Ġimmig ration", - "ä s", - ".. ...", - "ä¸ »", - "çĻ ½", - "st op", - "Ġк аÑĢ", - "Ġco ins", - "ĠÑĩ аÑģ", - "Ġdest ruction", - "l ined", - "Ġbar riers", - "ant ine", - "Ġprint ed", - "Ġcongrat ulations", - "ĠHe art", - "Ġin qu", - "th a", - "Ġhard ly", - "ĠA ven", - "Ġt inha", - "ĠS ony", - "ĠN F", - "Ġgradu ates", - "Ġsque eze", - "ere my", - "ÏĦ ι", - "Ġep ic", - "ĠJ u", - "Ġol m", - "ĠLa ughter", - "Ġbelief s", - "ĠC ru", - "ĠTr ue", - "ĠS oul", - "owe en", - "Ġrom antic", - "Ġз в", - "Ġan os", - "ĠY up", - "éĺ ¿", - "d im", - "Ġin fer", - "Ġз ам", - "Ġso c", - "uk a", - "Ġprec ise", - "Ġdro pping", - "Ġcl ue", - "Ġer rors", - "char ge", - "ĠP u", - "omet er", - "Ġlamb da", - "ac ional", - "ĠD ong", - "Ġcham ber", - "Ġthank ful", - "ĠN u", - "ĠHaw ai", - "Ġinf o", - "Ġactiv ate", - "ĠQ ual", - "Ġqu ed", - "Ñĥ лÑĮ", - "Ġcl oth", - "åĸ ľ", - "Ġw ichtig", - "5 5", - "Ġot ra", - "ograp her", - "Ġcur ios", - "Ġ19 80", - "Ġemp res", - "d ess", - "e ur", - "Ġcl uster", - "ar ter", - "ob ile", - "ĠY an", - "ĠAd v", - "Ġdiscipl ine", - "Ġìłķ ëıĦ", - "ĠPl ace", - "ĠSe lect", - "T E", - "ĠбÑĭ ла", - "Ġwh is", - "Ġb ay", - "ĠD or", - "en cing", - "Ġrep et", - "Ġf icar", - "p ad", - "Ġf og", - "u yor", - "Ġsn ap", - "ib t", - "Ġso bie", - "Ġappoint ment", - "ĠR y", - "Ġce iling", - "our se", - "Ġwr ites", - "ĠAfghan istan", - "Ġm os", - "az e", - "Ġpen al", - "Ġcry stal", - "IC E", - "ê° IJ", - "é Ł", - "ĠTes la", - "Ġthe ories", - "Ġappe al", - "Ġnewsp aper", - "Ġcook ies", - "æ ©", - "ĠاÙĦ ÙĦ", - "Ġma j", - "ĠGet ting", - "k ommen", - "ĠHe aven", - "ell s", - "Ġdiv ine", - "Ä «", - "Ġa kt", - "Ġhop es", - "ĠCh en", - "we gen", - "** *", - "ĠFra ge", - "Ġн и", - "ภ¹", - "min ister", - "nes ota", - "wh ich", - "Ġexpl icit", - "Ġverd ad", - "Ġgradu ated", - "ĠPh ilipp", - "Q L", - "ĠM I", - "Ġdev ot", - "Ġc ure", - "Ġclos est", - "Ġà Ħ", - "Ġsex y", - "ãģ Ľ", - "ĠDe ath", - "ok o", - "ug u", - "ĠAn ne", - "itar ian", - "es a", - "ег од", - "ĠD ur", - "Ġ 000", - "ze it", - "Ġtour nament", - "Ġmel hor", - "ภª", - "Ġin du", - "Ġf law", - "Ġw ars", - "ĠM ind", - "ĠI 
ron", - "ÑĤ ак", - "ĠV R", - "Ġs iz", - "ĠS outhern", - "Ġê·¸ëŁ ¬ë", - "Ġaw ak", - "Ġìķ ŀ", - "Ġc ube", - "believ able", - "if all", - "d is", - "Ġabandon ed", - "m ind", - "Ġpar l", - "Ġclass ical", - "è ĭ", - "á»Ļ t", - "ĠAut o", - "ĠB or", - "ç ©", - "4 00", - "ĠSoci ety", - "Ġsubt le", - "Ġmiss ions", - "Ġremember ed", - "ĠE ither", - "Ġda für", - "OR D", - "Ġint ensity", - "ES IN", - "ĠC up", - "Ġrare ly", - "Ġto ys", - "ĠChar lie", - "á» Ł", - "Ġgla ube", - "Ġround s", - "T IN", - "Ġcap ability", - "Ġderiv ative", - "Ġrefer ring", - "Ġd Ã¥", - "ĠT ALI", - "Ġcott on", - "Ġcon fer", - "Ġcolum ns", - "Ġliber al", - "Ġnun ca", - "Ġμ ε", - "Ġind o", - "ib en", - "ĠBe ispiel", - "Ġê·¸ë łĩ", - "ĠÑĥ Ñĩ", - "Ġh oy", - "Ġfr y", - "ĠScott ish", - "è Ĭ", - "Ġc iv", - "Ġconserv ative", - "Ġair pl", - "Ġs ar", - "r us", - "Ġinvest ments", - "Ġinfin ite", - "Ġà® ķ", - "ĠTALI ESIN", - "ĠG ary", - "ue ll", - "Ġа к", - "ĠC ir", - "Ġrit ual", - "Ġ>> >", - "Ġtem pt", - "ĠTe ch", - "ĠPoke mon", - "Ġimprove ments", - "Ġsp are", - "Ġtransl ate", - "Ġson ra", - "ĠFil m", - "w ort", - "Ġм и", - "Ġperiod s", - "Ġje alous", - "ãģĦ ãģĦ", - "Ġt ir", - "M I", - "Ġconduct ed", - "ĠìķĪë ħķ", - "0 9", - "ĠPol it", - "ĠWhere as", - "Ġmoist ure", - "Ġs ins", - "Ġk ap", - "ĠÑį к", - "Ġben im", - "Ġelimin ate", - "Ġathlet es", - "ĠMan ager", - "Ġfeature d", - "ap ore", - "äº Ľ", - "Ġë° ľ", - "Ġper f", - "ĠTh us", - "Ġdeb ut", - "об ÑĢ", - "Ġse ñ", - "Ġmyster ious", - "w ords", - "Ķ ê°Ģ", - "Ġcheck s", - "Ġvolunte er", - "Ġwas hing", - "ĠMar vel", - "ĠA B", - "iss ors", - "! '", - "ĠF ull", - "ye on", - "Ġwe igh", - "ĠJO HN", - "Ġv os", - "Ġproced ures", - "Ġaddress ed", - "ĠBer lin", - "put er", - "ĠB an", - "Ġmedic ation", - "Ġdr one", - "ĠÑĥ б", - "ĠJe an", - "Ġcap s", - "Ġdisappoint ed", - "Ġw ore", - "Ġêµ Ń", - "Ġorgan ize", - "ĠHall oween", - "Ġfant asy", - "y ard", - "Ġnos otros", - "Ġjump ed", - "Ġphot ography", - "ĠN ame", - "re c", - "A B", - "Ġbless ing", - "ĠSh ut", - "Ġbit ter", - "p op", - "ãģĿ ãĤĮ", - "Ġde i", - "Ġfulf ill", - "çIJ Ĩ", - "Ġden gan", - "Ġbe lo", - "ĠMean while", - "Ġdep ois", - "Ġdi abetes", - "Ġbu nd", - "ĠZe aland", - "Ġdig est", - "Ġt ires", - "Ġdo d", - "ag ne", - "ế t", - "Ġpe el", - "Ġз аб", - "Ġn odes", - "Ġtrend s", - "ĠSw itch", - "ĠA ward", - "ĠOr ig", - "ĠH al", - "Ġest as", - "Ġ3 60", - "Ġsim ult", - "Ġcom ic", - "Ġm Ãł", - "Ġbal anced", - "ĠPrin cess", - "Ġkilomet ers", - "á» ©", - "Ġpart ir", - "ì¤ ij", - "so ft", - "ĠV iew", - "Ġbi ological", - "in st", - "4 4", - "Ġman era", - "Ġcompreh ensive", - "ĠS ab", - "Ġcr imes", - "y ers", - "ĠComp any", - "ĠPh ot", - "Ġpou co", - "i ac", - "Ġbe im", - "in ate", - "Ġsub sequ", - "ĠMay or", - "Ġcent uries", - "è res", - "ìŀĸ ìķĦìļĶ", - "Ġê·¸ëŁ ¼", - "ĠFra u", - "ĠO H", - "Ġëģ Ŀ", - "ĠN ah", - "ĠSer ies", - "Ġover night", - "íĴ Ī", - "ĠâĢ ¢", - "Ġtra ve", - "atter ed", - "Ġwar ri", - "ĠGru nd", - "ĠInd ones", - "Ġsc ra", - "ob y", - "ĠBro ok", - "Ġcur s", - "Ġë ¸", - "Ġexpl ains", - "ram atic", - "Ġparticip ating", - "Ġmin ut", - "Ġcontract s", - "Ġg egen", - "Ġdisappe ared", - "ĠS N", - "Ġrob ust", - "ap h", - "Ġsh rim", - "Ġdev ast", - "c ope", - "Ġme ets", - "Ġpeace ful", - "m ate", - "Ġwe ld", - "Ġ× ª", - "d on", - "Ñĥ ÑĤÑĮ", - "Ġregister ed", - "ĠN ik", - "j in", - "Ġc av", - "Ġe cht", - "io x", - "Ġflow ing", - "но ÑģÑĤи", - "Ġto e", - "Ġent ity", - "ов а", - "f its", - "ĠPat rick", - "ÑĤ ÑĢ", - "Ġle verage", - "Ġcor rel", - "i ah", - "Ġstr ings", - "ist inct", - "Ġg ue", - "arch y", - "Ġteng o", - "ım ız", - "Ġor bit", - "ä¸ º", 
- "Ġе ÑīÑij", - "ca ke", - "Ġ׾ ×Ķ", - "ĠMin nesota", - "Ġbra ke", - "ow ie", - "Ġcra w", - "ê¸°ë ¥¼", - "Ġprogram me", - "ĠÑģл ÑĥÑĩ", - "åı ª", - "ien ces", - "ĠO ui", - "ĠP ers", - "im iento", - "ĠIn vest", - "Ġsl ower", - "æĻĤ åĢĻ", - "ĠB eth", - "Ġnur se", - "ĠSpr ing", - "S p", - "Ġun employ", - "д и", - "Ġgen ius", - "ĠA aron", - "Ġê·¸ëŁ ¬", - "Ġe i", - "ãģĹ ãĤĩ", - "Ġtank s", - "Ġau jourd", - "Ġcomplex ity", - "ĠÑĢ еÑĪ", - "Ġold est", - "Ġlet z", - "åħ ¥", - "Ġphenomen on", - "pr int", - "ĠBund es", - "it at", - "ê» ĺ", - "Ġ4 2", - "ĠW i", - "Ġinc om", - "Ġg ek", - "Ġembr ace", - "Ġt ies", - "out e", - "Ġd ose", - "ĠF riends", - "Ñĭ ÑĤ", - "егод нÑı", - "Ġor g", - "Ħë ¡ľ", - "ó g", - "Ġex ceed", - "Ġgod s", - "Ġê±° ìĺĪìļĶ", - "Ġsoci et", - "ĠUn ivers", - "it ät", - "Ġword en", - "Ġsm oking", - "Ġint ens", - "ab ul", - "em ia", - "è ij", - "4 7", - "f ly", - "Ġ200 6", - "ĠSer iously", - "Ġprze z", - "æ ¼", - "c re", - "Ġn an", - "Ġmod es", - "ов аÑĤÑĮ", - "ĠH ang", - "em en", - "Ġbenefic ial", - "Ġvot ers", - "ĠBro ad", - "Ġb ent", - "W ow", - "Ġm ul", - "åĵ ¥", - "ĠU C", - "Ġdam aged", - "ĠUk raine", - "Ġw ipe", - "Ġst ones", - "Ġman agers", - "Ġr ab", - "ÑģÑĤÑĢ о", - "l at", - "Ġde ce", - "Ġgraph ic", - "Ġf oss", - "Ġdisag ree", - "ĠAm en", - "Ġsec rets", - "ho le", - "ink le", - "Ġfortun ate", - "Ġì ±", - "ìľ Ħ", - "èIJ ¬", - "Ġhab its", - "Ġbur ied", - "Ġh in", - "Ġvirt ually", - "ol as", - "ĠR P", - "ĠT ab", - "l ow", - "Ġsacr ific", - "Ġestim ated", - "ol n", - "Ù ĭ", - "c ur", - "ĠFe el", - "Ġcast le", - "Ġus eless", - "Ġdis g", - "ĠJac ob", - "Ġga an", - "Ġup side", - "Ġpare ce", - "ãĥ³ ãĥ", - "Ġsh ipping", - "ĠC R", - "Ġdis rupt", - "ac ter", - "UN D", - "f u", - "å® Į", - "ĠP ick", - "ĠChar l", - "ĠB ull", - "Ġenter prise", - "Ġpunish ment", - "ack ing", - "Ġfr action", - "Ġtab let", - "Ġch ord", - "Ġsimilar ly", - "åħ¶ 實", - "ĠTor onto", - "Ġcour ts", - "ÄŁ l", - "esz cze", - "Ġpron oun", - "ĠS ister", - "ĠM P", - "Ġgreat ly", - "ĠD ank", - "ic op", - "Ġgar bage", - "Ġresol ve", - "ĠS af", - "ĠG un", - "Ġcomp ound", - "Ġë° °", - "ĠMus ik", - "âĻ «", - "Ġcha os", - "ĠWhen ever", - "Ġe uros", - "Ġor chest", - "Ġrefr iger", - "al an", - "ภ·", - "ĠAm azing", - "Ġp ud", - "ag an", - "Ġj eszcze", - "is y", - "Ġaccur acy", - "ĠA ma", - "is ode", - "ë ĮĢ", - "Ġinterpret ation", - "ĠL iber", - "æ ·", - "c am", - "Ġevol ved", - "ĠK ay", - "ÑĨ Ñĭ", - "Ġcreat or", - "it as", - "Ġal arm", - "Ġcelebr ation", - "z ent", - "Ġfun cion", - "Ġo v", - "umb ling", - "Ġ %", - "ภĪ", - "Ġrestrict ions", - "Ġн ав", - "ĠK inder", - "Ġban ana", - "ÑĮ Ñı", - "Ġdiam eter", - "Ġnor thern", - "ur ers", - "ĠP as", - "æĪij çļĦ", - "Ġwork force", - "Ġj ung", - "Ġguar ante", - "Ġequ ilib", - "Ġsu ite", - "Ġeu ro", - "Ġdel iber", - "S te", - "Ġdownt own", - "Ġch in", - "Ġc odes", - "ed ia", - "Ġshe ep", - "res hold", - "wn ie", - "ó b", - "Ġunder lying", - "l ia", - "j er", - "ÏĢ ÏĮ", - "ç Ŀ", - "th rop", - "Ġz ap", - "Ġvac uum", - "ĠH ab", - "Ġwra pped", - "ì ¢", - "Ġinvent ory", - "м а", - "Ġco ord", - "Ġpl ates", - "Ġsy mm", - "T e", - "ĠwÅĤa ÅĽnie", - "Ġreach es", - "Ġlon ely", - "S cript", - "le e", - "ess er", - "Ġê± ¸", - "ĠGes ch", - "ĠMo ving", - "Ġré p", - "ĠV ill", - "åIJ Ī", - "ĠR achel", - "Ġtem os", - "ON E", - "Ġstra in", - "Ġang el", - "Ġf Ã¥", - "T r", - "Ġach o", - "Ġhighlight s", - "ĠW er", - "ĠCar l", - "Ġbl ur", - "Ġreg ards", - " ·", - "ил ÑģÑı", - "Ġrec re", - "ĠY ani", - "U CK", - "ł ¸", - "Ġelectr ons", - "ĠSp iel", - "Ġv ed", - "Ú ¾", - "Ġbe am", - "Ġid iot", - "ë ĵ¤", - "на Ñĩ", - 
"id d", - "Ġsk i", - "it ative", - "Ġhyp othes", - "ãģ§ãģĻ ãģŃ", - "ent er", - "ĠìķĦëĭĪ ë", - "Ġih re", - "Ġpre view", - "ang el", - "Ġdem on", - "Ġd us", - "Ġd ic", - "ĠK om", - "LE Y", - "... !", - "Ġsie ht", - "ĠSon ic", - "Ġten ho", - "an as", - "Ġdig it", - "ĠMa ar", - "Ġunder grad", - "oun cer", - "uff y", - "Ġconvers ion", - "Ġdis connect", - "Ġe cho", - "om er", - "Ġcurric ulum", - "Ġper ché", - "Ġw and", - ".. ?", - "Ġroll ed", - "Ġentreprene ur", - "Ġtheore t", - "ĠÑī о", - "Ġins ights", - "Ġzus ammen", - "o in", - "ret t", - "p rodu", - "Ġvisit ors", - "e ous", - "Ġgrand mother", - "Ġhum or", - "Ġн иÑħ", - "zen ia", - "ins on", - "Ġres et", - "Ġbase ball", - "Ġmatch ing", - "ëĭ¤ ê°Ģ", - "Ġpun to", - "ì ¡", - "Ġre de", - "Ġaddress ing", - "Ġfore cast", - "ĠB ol", - "Ġcol ored", - "Ġdocument ation", - "Ġexpect ation", - "ĠNor thern", - "Ġcre o", - "Ġà® ļ", - "f on", - "Ġuns ere", - "U M", - "Ġcop ies", - "Ġexpand ed", - "Ġveter ans", - "ĠAl m", - "Ġво обÑīе", - "Ġpsych ological", - "Ġnos so", - "Ġpay ments", - "im eters", - "Ġ-- >", - "ĠJenn ifer", - "Ġvolunte ers", - "os se", - "or ious", - "ĠбÑĭ ли", - "è Ĥ", - "ĠEs s", - "w s", - "ĠB C", - "ĠI C", - "W oman", - "Ġv ont", - "Ġeth nic", - "EN N", - "им о", - "Ġlo b", - "Ġou i", - "c s", - "Ġre he", - "Ġìł ģ", - "Ġch ick", - "ús ica", - "Ġk ont", - "ĠDist rict", - "Ġp ile", - "Ġа в", - "ей ÑģÑĤв", - "Ġ £", - "Ġiss ued", - "Ġком п", - "Ġpros per", - "Ġprof ound", - "ĠDe ar", - "Ġãģ ĵ", - "Ġfund ed", - "Ġb isa", - "ŀ ĺë", - "× Ł", - "ĠìĿ ĺ", - "Ġtw elve", - "ĠChamp ions", - "éĿŀ 常", - "Ñģ л", - "Ġ200 5", - "p m", - "Ġon de", - "Ġdiff é", - "ĠCh all", - "Ġdifficult ies", - "Ġgar age", - "Ġd á", - "ün k", - "Ġë¬ ¼", - "Ġtr an", - "Ġsubm itted", - "z w", - "ÙĪ ا", - "Ġar k", - "ĠìĦ ±", - "Ġgrocer y", - "он а", - "i ere", - "Ġa est", - "Ġexhib ition", - "Ġr és", - "Ġconsist ency", - "Ġcook ie", - "н ей", - "Ġrepl acement", - "æ² ¹", - "ĠS em", - "ĠìĤ¬ ìļ©", - "8 00", - "Ġgen es", - "Ġtrans action", - "ĠE L", - "Ġdur ante", - "ib les", - "ĠE at", - "t ail", - "iss ance", - "Ġto ss", - "Ġsurv ived", - "Ġoff ices", - "Ġsupport ive", - "Wh ere", - "Ġtout es", - "Ġë§ ī", - "Ġj okes", - "ier on", - "ap ers", - "Ġm ature", - "ĠM arsh", - "Ġs ido", - "k ind", - "Ġreal mente", - "ĠChe f", - "Ġquel que", - "Ġjud ges", - "e ft", - "ER S", - "Ġj et", - "Ġpers ons", - "è »", - "iz ations", - "ri k", - "Ġsh ops", - "ĠW y", - "Ġele g", - "qu è", - "qu oi", - "Ġjug a", - "Ġíķľë ²Ī", - "ĠQuest ion", - "ĠGlo bal", - "Ġìķ½ ê°Ħ", - "ĠSt ation", - "æİ ¥", - "ĠOh io", - "Ġstick y", - "Ġst ressed", - "Ġg ün", - "Ġí Ŀ", - "ÑģÑĤ Ñĥп", - "é ¡Į", - "ĠPh D", - "im mer", - "Ġment or", - "Ġinv ented", - "Ġre un", - "Ġine vit", - "Ġpol ÃŃt", - "Ġexec ute", - "ĠSt ory", - "Ġout standing", - "Ġgu er", - "ĠR ain", - "Ġch oses", - "ĠT it", - "ĠÑģ еÑĢ", - "ĠSing apore", - "ĠN one", - "Ġch ronic", - "°ë į°", - "Ġe go", - "æł ·", - "ES T", - "ãģĤ ãĤĬ", - "ĠW ang", - "ĠN AT", - "Ġa ug", - "Ġdes ktop", - "Ġetern al", - "ĠìĤ¬ ìĭ¤", - "ĠConst itution", - "ìĤ ¬ë", - "×Ļ× ľ", - "p res", - "ĠТ Ñĭ", - "Ġinter f", - "Ġlist s", - "Ġfight s", - "ft en", - "ĠI owa", - "Ġmotiv ated", - "ĠH osp", - "Ġelse where", - "Ġpath s", - "Ġinst ances", - "B l", - "r ange", - "á» ±", - "ĠS it", - "man a", - "Ġìĭľ ìŀij", - "Ġm ình", - "ans as", - "Ġs na", - "Ġphilos oph", - "Ġpas se", - "Æ°á» Ŀi", - "ak h", - "ent al", - "Ġih n", - "ru ctor", - "Ġв аÑĪ", - "Ġgener ous", - "Ġp ivot", - "п ол", - "Ġjam ais", - "Ġcom ent", - "ĠL ew", - "od zi", - "ĠX box", - "Ġв од", - "Ġcons ent", - "ī ìŀ¥", - "Ġdis 
par", - "l ass", - "ĠGovern or", - "Be ifall", - "Ġê° ľ", - "Ġbelo ved", - "׳ ×ķ", - "se ll", - "Ġhon ored", - "le h", - "Ġw äre", - "un ting", - "Ġfra ud", - "ĠR AM", - "ê± ¸", - "Ġkill s", - "Ġeconom ics", - "0 4", - "п еÑĢ", - "Ġco isas", - "Ġи гÑĢ", - "ÃŃ m", - "Ġmö chte", - "Ġìµ ľ", - "Ġstim ul", - "Ġfast est", - "l v", - "Ġg én", - "ĠS ounds", - "Ġ19 70", - "Ġhome work", - "spe aking", - "Ġencour aging", - "Ġqu ery", - "Ġre vers", - "pro fit", - "Ġd y", - "Ġìŀ ij", - "ëĬĶëį° ìļĶ", - "Ġso ap", - "ĠG all", - "ĠC N", - "ĠAn s", - "Ġf ic", - "ank s", - "Ġdess ert", - "ĠìłĢ íĿ¬", - "ĠM aking", - "Ġcome ç", - "ê³ Ħ", - "Ġassoci ation", - "D ad", - "he e", - "Ġh ogy", - "Ġap ro", - "Ġinvis ible", - "Americ an", - "í İ", - "Ġvi be", - "Ġem issions", - "Ġadvoc ate", - "Ġkick ed", - "Ġ vel", - "Ġsum mar", - "Ġfre aking", - "ch ron", - "Ġpin ch", - "Ġwszyst k", - "isc al", - "Ġpro ved", - "Ġmind ful", - "Ġt ä", - "Ġno ises", - "Ġisol ated", - "Ġcross ed", - "Ġê° ķ", - "Ġvo ilÃł", - "Ġch ore", - "ĠR A", - "C om", - "Ġrelax ed", - "at ro", - "Ġpre vention", - "Voice over", - "O D", - "ĠCo vid", - "Ġsepar ation", - "Ġ- [", - "иÑĩ его", - "çĻ ¼", - "ĠS D", - "ble ep", - "Ġindepend ence", - "Ġpart ial", - "Ġalgorith ms", - "ĠAny one", - "Ġassoci ate", - "h um", - "ic ular", - "Ġb ạn", - "Ġbatt les", - "G ood", - "App lause", - "Ġbast ante", - "Ġadv ant", - "ĠS weet", - "Ġref used", - "ãĤ ¸", - "ĠÑĤеб е", - "pl et", - "Ġencour aged", - "åĵ ¦", - "Ġmir acle", - "ĠB un", - "ĠV ar", - "rim ination", - "e lect", - "ĠM ult", - "Ġdeliver ing", - "e ing", - "Ġc m", - "ne hmen", - "ĠL ine", - "Ġë§ Į", - "en ced", - "ĠS ound", - "ĠCont in", - "ij d", - "UN G", - "k le", - "Ġth reshold", - "Ġcomp act", - "ad t", - "Ġto es", - "ĠP ur", - "own ed", - "ment ed", - "Ġdes igning", - "Ġvacc inated", - "Ġexha ust", - "Ġbas ics", - "Ġcons ists", - "ĠGu y", - "ac zy", - "Ġm ÃŃ", - "w on", - "å® ³", - "Ġ8 5", - "æ Ĥ", - "Ġm um", - "Ġign or", - "Ġprint ing", - "ac ular", - "p ow", - "Ġexpand ing", - "Ġg ir", - "ĠC ab", - "íĺ ¸", - "ÑĤÑĮ ÑģÑı", - "ĠìĹ¬ëŁ¬ë ¶Ħ", - "Ġang les", - "Ġterm inal", - "ĠW on", - "ĠInter esting", - "Ġcross ing", - "Ġbond s", - "Ġpu eden", - "Ġor b", - "lar ın", - "Ġcreep y", - "Ġnutr ition", - "Ġall ies", - "Ġwire less", - "Ġdes ired", - "Ġcomp ute", - "ĠAri zona", - "ĠBeaut iful", - "Ġprodu ces", - "Ġnuest ro", - "t ed", - "Ġel igible", - "ĠÑģ оз", - "ic ial", - "ĠH ero", - "Ġcons ume", - "Ġrob ots", - "Ġpurch ased", - "c ción", - "Ġ iz", - "ượ c", - "ίν αι", - "ĠØ£ ÙĨ", - "Ġshad ows", - "ĠMed ia", - "Ġprin cess", - "Ġk lar", - "Ġwood en", - "Ġus ar", - "Ġg üzel", - "Ġsl ot", - "r ade", - "Ġë Ĵ", - "Ġhar mon", - "Ġingred ient", - "ors hip", - "ek i", - "Ġgrand father", - "Ġexcit ement", - "Ġpolit icians", - ".. 
!", - "Ġout s", - "Ġsepar ately", - "ĠÑı к", - "ĠW elt", - "ĠP ow", - "j an", - "Ġorient ation", - "åı ĭ", - "L C", - "age m", - "ÛĮ Úº", - "åIJ Ĺ", - "Ġbran ches", - "ad en", - "rent e", - "ĠI hr", - "as m", - "Ġest ão", - "ĠN ic", - "Ġsla ve", - "Ġcomp ress", - "c rowd", - "Ġclim bing", - "ĠMan agement", - "ĠB ah", - "Ġpan ic", - "Ġk or", - "Ġcool ing", - "Ġb ind", - "Ġз ад", - "Ġr ack", - "Ġent it", - "Ġs ends", - "Ġyour selves", - "d es", - "ĠMuslim s", - "Ġí ļ", - "ism a", - "cy cle", - "un kt", - "ĠC ore", - "Ġinj uries", - "Ġident ical", - "ка Ñı", - "ĠDeutsch land", - "Ġе е", - "is an", - "Ġtr uc", - "let on", - "Ġback up", - "Ġult ra", - "Ġab und", - "ille urs", - "Ġby ÅĤo", - "åħ ĥ", - "ort ed", - "Ġearth qu", - "Ġк л", - "Ġobs ervation", - "Ġmainten ant", - "el en", - "Ġsett led", - "Ġp ela", - "ĠE conom", - "Ġ Õ", - "Ġste ering", - "ĠAL L", - "ĠC her", - "Ġpat ience", - "ĠS now", - "Ġb or", - "Ġworth y", - "Ġcá i", - "Ġ× §", - "Ġκ α", - "d og", - "ĠK aren", - "ill es", - "Î ²", - "Ġagric ulture", - "×ķ× Ł", - "ĠSe an", - "Ġsens ors", - "íķ ´ë", - "ag h", - "Ġpublic ly", - "Ġpe ux", - "ĠAlex ander", - "Ġprior it", - "Ġla zy", - "ard on", - "atter ing", - "Ġcost ume", - "س ت", - "è¿ ĺ", - "Ġun w", - "Ð Ľ", - "Ġthick ness", - "qu ito", - "g unt", - "ist as", - "ne ys", - "ĠëIJĺ ê²Į", - "ĠBr asil", - "Ġto ken", - "Ġaff ili", - "l on", - "Ġf Ã¥r", - "ĠBe ach", - "Ġw itch", - "ĠSe ven", - "Ġp ant", - "λ λ", - "Ġcapt ain", - "å Ŀ", - "Ġve ut", - "Ġpou voir", - "ac z", - "ĠBar b", - "Ġut ility", - "Ġcontempor ary", - "Ġobt ained", - "Ġpainting s", - "e ar", - "Ġpe an", - "ĠO g", - "Ġc ust", - "л ем", - "Ĥ ĺë", - "ĠIs so", - "Ġac onte", - "ĠTe le", - "ĠAss istant", - "à ī", - "íĸĪ ìĬµëĭĪëĭ¤", - "Ġcount s", - "Ġbu ck", - "ĠDe ep", - "Ġtack le", - "Ġh arsh", - "Ġdec ides", - "éĹ ľ", - ". 
âĢĭ", - "éĤ Ĭ", - "ĠAng el", - "Ġlay ing", - "Ġcal ories", - "Ġcontro lling", - "Ġadvant ages", - "ĠÑįÑĤ ой", - "Ġappro aching", - "Ġthreat s", - "ak an", - "em atic", - "m ann", - "ê³ µ", - "m umbles", - "ac ió", - "Ġmaint aining", - "Ġfound er", - "l ah", - "f ight", - "Ġadm itted", - "âĢ¦ .", - "ķ Į", - "ab ol", - "Ġus age", - "Ġn onsense", - "ĠPal est", - "Ġcont re", - "ĠDemocr atic", - "ĠE R", - "j ekt", - "Ġar bit", - "Ġг ол", - "ĠMich elle", - "ich er", - "es h", - "ĠP ho", - "к ом", - "4 9", - "ĠEner gy", - "ο Ïį", - "Ġc ents", - "Ġref ers", - "Ġg ospel", - "ĠSh a", - "ĠSh are", - "×Ļ× ł", - "Ġclin ic", - "ĠëĦ £", - "Ġequ ality", - "ug s", - "Ġsh ed", - "Ġplan es", - "Ġtout e", - "re ck", - "Ġstra nd", - "Ġbi ology", - "Ġle ague", - "ĠP ok", - "Ġnúmer o", - "ĠCo ast", - "Ġconsist ently", - "Ġnuc le", - "OO OO", - "Ġob jet", - "Ġch or", - "Ġg inger", - "Ġd abei", - "Ġcoop eration", - "à¯į .", - "nt en", - "ç ¤", - "l Ãł", - "ìĸ ij", - "r ado", - "Ġpass ive", - "Ġglo ves", - "Ġunder ground", - "Ġlog ical", - "Ġk et", - "Ġfunction ality", - "¸ë ¦¬", - "Ġport al", - "ell er", - "×Ļ× ¨", - "ĠT ed", - "ĠG re", - "IJ ľ", - "Ġperson nel", - "Ġemer ging", - "ĠF ür", - "Ġmeant ime", - "usal em", - "ĠC lear", - "Ġtra pped", - "Ġìļ °", - "Ġdis pl", - "Ġmet tre", - "Ġmun icip", - "Ġwithd raw", - "Ġsp at", - "un es", - "Ġaccess ibility", - "æĪij 们", - "Ġap are", - "Ġpros pect", - "Ġн аз", - "Ġcop per", - "ĠP RO", - "Ïħ ÏĦ", - "Ġattack ing", - "ĠV in", - "ĠSt one", - "Ġinvestig ate", - "st yle", - "ĠÎ »", - "ë ¡Ŀ", - "ë§ Ī", - "Ġins pect", - "Ġli ver", - "ал иÑģÑĮ", - "Ġser a", - "hal ten", - "em an", - "Ġmin istry", - "' '", - "Ġd ots", - "ãħĭãħĭ ãħĭãħĭ", - "Ñĥ ÑģÑĤ", - "ĠJ ak", - "AK E", - "Ġg aps", - "uck er", - "ĠинÑĤеÑĢ еÑģ", - "ĠEm ily", - "Ġinter val", - "Ġt ender", - "ĠTechn ology", - "g ame", - "Ġtri b", - "ÙĦ ا", - "ĠDevelop ment", - "Ùħ ا", - "Ġwr ist", - "Ġf ires", - "Ġtarget ed", - "ìł IJ", - "Ġso d", - "íļ Į", - "Ġoldu ÄŁ", - "Ġse asons", - "vent ions", - "Ġн его", - "Ġsomet ime", - "ли в", - "n é", - "Ġt ú", - "ĠDe us", - "Ġexec ution", - "á p", - "ĠCh ange", - "ĠInd eed", - "Ġreg ulation", - "ĠH ung", - "é is", - "Ġwish es", - "Ġj azz", - "Ġstruct ural", - "Ġblow ing", - "Ġby Äĩ", - "Ġtherm al", - "ph ant", - "ÑĢÑĥ з", - "ан ÑĤ", - "ĠP ull", - "Ġconf usion", - "нÑĭ ми", - "Ġscen arios", - "ìłģ ìľ¼ë¡ľ", - "Ġд еÑĤ", - "Ġtatto o", - "Ġaut re", - "Ġhe ating", - "Ġtreat ing", - "Ġпон им", - "Ġexc lus", - "ĠL OL", - "we ar", - "ag le", - "Ġzur ück", - "Ġr ational", - "s u", - "Ġdet er", - "ĠN ative", - "à®ķ ள", - "ach ed", - "Ġ ãĥ", - "ĠEnt onces", - "Ġhor a", - "ìĿ´ìĹIJ ìļĶ", - "Ġl ite", - "à «", - "Ġsix th", - "Ġбол ее", - "act or", - "Ġpsych ology", - "çĽ ¸", - "Ġdem ands", - "Ġpe er", - "Ġnew ly", - "ĠWW E", - "Don ald", - "ĠBo x", - "Ġp ine", - "Ġload ing", - "ĠN ico", - "Ġs ÅĤ", - "omm e", - "AR T", - "Ġrecru it", - "Ġbug s", - "arent s", - "ĠпÑĢ об", - "ĠIn side", - "ipp er", - "d ramatic", - "Ġplan ets", - "ord e", - "Ġy oga", - "ch ild", - "ĠMar ie", - "Ġãģ Ĥ", - "ĠB L", - "Ġfil med", - "Ġref resh", - "Ġtomato es", - "Ġf et", - "Qu é", - "Ġ !!", - "ĠëĤ ´ë", - "r ine", - "Ġinteract ive", - "s al", - "ann ah", - "pe z", - "ç¶ ĵ", - "Ġunderstand s", - "ĠTok yo", - "Ġlibr aries", - "Ġread er", - "ij IJ", - "o z", - "ĠEnd e", - "ĠF lo", - "Ġm ild", - "Ġpo etry", - "Ġж ив", - "æĦ Ľ", - "Ġbeh ave", - "Ġdo en", - "ĠSus an", - "p age", - "ra ham", - "Ġcommunic ations", - "Ġtun ing", - "Ġp ac", - "Ġanx ious", - "I O", - "M ark", - "Ġhi ç", - "book s", - "Ġp iss", - "Ġen abled", - "achel 
or", - "ĠF OR", - "Ġé c", - "ĠT R", - "il st", - "h at", - "ĠìĿ Į", - "Ġty ch", - "Ġj ar", - "Ġbuild s", - "ĠAr gent", - "Ġinter medi", - "Ġl ou", - "Ġa ra", - "Ġassign ment", - "Ġcabin et", - "Ġretire ment", - "ãģ »", - "Ġdis abled", - "ric a", - "Ġa wards", - "Ġbo ots", - "Ġacknow led", - "Ġth y", - "Ġêµ ¬", - "Ġsy nd", - "ни й", - "il ton", - "Ġprob l", - "ĠF al", - "Ġverd ade", - "Ġ7 00", - "ĠLe arning", - "oc us", - "Ġpal ace", - "N ot", - "t ain", - "c m", - "Ġmagn et", - "inc oln", - "Ġfig uring", - "ĠL yn", - "ĠB oss", - "ĠV O", - "Ġdiagn osis", - "Ġequ ipped", - "w atch", - "in os", - "ad ers", - "Ġsh elf", - "Ġorgan is", - "Ġn od", - "Ġk ız", - "pp ers", - "Ġrest ore", - "Ġart ic", - "ĠVo ice", - "ı yorum", - "ê² ©", - "Ġspread ing", - "Ġh ips", - "Ġw ard", - "ure au", - "Ġinter section", - "6 6", - "Ġ3 9", - "ç ³", - "Ġwait ed", - "ì ´", - "hh hh", - "Ġd ys", - "ĠE N", - "Ġb atch", - "Ġca f", - "Ġmark er", - "大家 好", - "or able", - "ó ria", - "Ġste pped", - "Ġcelebr ating", - "ан а", - "Ġwor n", - "ĠF ol", - "Ġpl a", - "Ġattempt s", - "Ġtwe et", - "Ġr ust", - "g ence", - "í Ĩµ", - "Ġre vel", - "Ġre cept", - "en ess", - "Ġ( (", - "ãĥ¼ ãĥ", - "! âĢĭ", - "ĠìĨ IJ", - "Ġinfluen ced", - "и ж", - "Ġкон еÑĩно", - "Ġcolleg es", - "ion i", - "Ġs ag", - "An n", - "ol ar", - "Ġexpress ions", - "Ġsu its", - "Ġowners hip", - "el and", - "pie ce", - "æĢİ ä¹Ī", - "Ġdesp ués", - "Ġt el", - "Ġins ult", - "Ġêµ īìŀ¥", - "ĠSm all", - "ĠF R", - "ok a", - "ber ries", - "ĠAnt on", - "ел Ñı", - "Ñı Ñģ", - "Ġval ve", - "act s", - "Ġwood s", - "à® £", - "Ġcult iv", - "Ġf á", - "ãģ¨ ãģĦãģĨ", - "Ġche ers", - "Ġassum ption", - "Ġfit ness", - "ÃŃ cul", - "Ġpod r", - "Ġwe it", - "ĠH ind", - "Ġd ign", - "Ġз н", - "Ġsqu ad", - "Ġdest ro", - "c ere", - "sh irt", - "imm t", - "eng ers", - "Ġs ä", - "k ÅĤad", - "Ġ ÈĻ", - "Ġocc as", - "Ġì¤ Ħ", - "Ġprocess or", - "ĠD M", - "ĠDad dy", - "Ġsoon er", - "Ġstraight forward", - "Ġdepart ments", - "ĠChr ome", - "Ġwork place", - "ĠPy thon", - "Ġm eng", - "ĠD AN", - "ĠI ce", - "ĠëĪ Ī", - "ĠG i", - "Ġh iring", - "Ġland ed", - "Ġdemocr atic", - "ied z", - "ãģĺ ãĤĥ", - "Ġse v", - "ic ia", - "Ġespe cial", - "ĠN ous", - "Ġh ät", - "Ġb ou", - "per t", - "ies z", - "åij Ģ", - "Ġv il", - "ÅĽ li", - "Ġî n", - "Ġloss es", - "éķ ·", - "Ġto ast", - "Ġreal m", - "ĠAust in", - "ĠIn formation", - "Ġres ume", - "Ġch ase", - "Ġsal ary", - "Ġë¶ Ħ", - "ли Ñĩ", - "ĠÑģл ед", - "ĠFur ther", - "Ġcar ing", - "Ġv ig", - "Ġval or", - "è¿Ļ 个", - "ĠÑĩ а", - "Ġanalyt ics", - "Ġglo be", - "ĠM AN", - "Ġn el", - "ìĿ´ì ķ¼", - "Ł ¼", - "Ġo y", - "íķĺ ìĦ¸ìļĶ", - "j en", - "Ġtrou bles", - "ah aha", - "Ġchurch es", - "u et", - "Ġmeasure ments", - "b il", - "ì ½", - "if ully", - "ин Ñĥ", - "ĠWil son", - "¦ ´", - "ĠíĮ Į", - "Ġì° ¨", - "Ġp úblic", - "ĠJer usalem", - "Ġn ails", - "Ġsp ine", - "Ġhe mos", - "Ġz n", - "qu is", - "ĠLe ben", - "Ġrefer ences", - "IT H", - "i per", - "ĠÑģеб Ñı", - "ì ģ", - "ĠW a", - "st ate", - "§ Ŀ", - "åħ ±", - "ĠGen er", - "Ġact ress", - "ĠEn joy", - "๠ĥ", - "Ġ× Ĵ", - "Ġinfect ed", - "Ġsh aking", - "Ġn ick", - "ภ¸", - "Ġf ot", - "Ġaccompl ished", - "u ke", - "Ġshe ets", - "Ġf ence", - "Ġnurs ing", - "Ġintrodu cing", - "Ġfe at", - "O ne", - "T O", - "Ġcl ubs", - "ĠBru ce", - "on ge", - "ch ange", - "ĠBat man", - "åı °", - "ĠOffic er", - "Ġhyd ro", - "Ġsupp lement", - "Ġc ela", - "Ġlong est", - "Ġcompet ing", - "Ġcon he", - "g iving", - "Ġbra ins", - "Ġlo ans", - "Ġw age", - "ĠCl inton", - "Ġs Äĥ", - "ane ous", - "Ġl ord", - "ÑĢÑĥ ж", - "Ġqu iz", - "Ġst iff", - "ĠL GB", - "s z", - "M E", - 
"m are", - "th ere", - "Ġn är", - "ĠM and", - "l ast", - "Ġd ag", - "Ġhalf way", - "ĠB and", - "Ġëĭ¤ ìĭľ", - "ĠA ren", - "Ġi le", - "P N", - "ent o", - "Ġalg um", - "Ġsoc cer", - "Ġblock ed", - "ĠJon athan", - "Ġse w", - "ĠTest ament", - "Ġv ale", - "Ġbehav i", - "å§ ĭ", - "Ġcon na", - "IC H", - "Ġaud iences", - "m l", - "amm ad", - "ĠìĤ ´ì", - "I GH", - "Ġr aces", - "em ed", - "Ġm á»Ļt", - "à ¯", - "Ġover s", - "Ġdecl ared", - "Ġs ana", - "ĠU na", - "ĠÑĢ е", - "uck s", - "Ġp airs", - "Ġan ge", - "N e", - "Ġup s", - "av y", - "ø r", - "ree k", - "Ġbehav iors", - "Ġreflect ed", - "Ġprior ities", - "Ġcon du", - "Ġret reat", - "Ġexp enses", - "Ġë´ IJ", - "Ġtri ple", - "Ġêµīìŀ¥ íŀĪ", - "ä lt", - "Ġind igenous", - "Ġmin ing", - "Ġaccept able", - "Ġru in", - "C A", - "u ine", - "Ġpip eline", - "ct ic", - "ê t", - "ĠвÑģ его", - "Ġb oun", - "ĠDig ital", - "ĠBo om", - "ÑĨ е", - "Ġл ÑĥÑĩ", - "Ġas c", - "ĮĢë ¡ľ", - "ĠGood bye", - "Ġrend er", - "ene z", - "ar re", - "ĠTH AT", - "b our", - "ic ión", - "ãĤ Ń", - "E very", - "Ġw ires", - "ĠPar liament", - "n ung", - "ate ur", - "ĠS ave", - "ĠPh ys", - "Ġam or", - "ĠE ve", - "Ġfr ight", - "Ġgam ma", - "Ġmic ros", - "m itt", - "ĠC ode", - "ĠBe y", - "pl ed", - "ĠиÑģп олÑĮз", - "ç Ĺ", - "ìĥ ī", - "å¥ ¹", - "Ġmon et", - "ĠJah re", - "Ġlux ury", - "Ġde af", - "Ġbet ray", - "Ġê² °", - "и ки", - "Ġdefe ated", - "Ġunder t", - "Ġwe g", - "Ġcool er", - "ãģķ ãĤĵ", - "iam i", - "éĤĦ æľī", - "ĠJess ica", - "ĠJ oy", - "Ġsoph istic", - "ени и", - "ðĿ ĺ", - "Ġch ili", - "ĠTy pe", - "Ġprote ins", - "Ġpresent ing", - "al ia", - "ìļ ¸", - "ĠMaj or", - "Ġmolec ule", - "um er", - "Ġcoll apse", - "ĠAny ways", - "ĠMount ain", - "ant ed", - "ãĢ IJ", - "Ġвиде о", - "æ° ´", - "A ud", - "Ġcon qu", - "Ġvo ll", - "Ġkn it", - "Ġmem br", - "ĠMark et", - "Ġd ari", - "Ġcalcul ated", - "г и", - "Ġshrim p", - "ĠM u", - "ĠпÑĢ оÑĤ", - "Ġìĺģ ìĥģ", - "Ġproduct ivity", - "Ġcogn itive", - "ĠHe b", - "ict ions", - "ê² ½", - "Ġcr é", - "f ör", - "Ġpray ing", - "ash i", - "ĠT ik", - "ó r", - "w en", - "ÑĮ Ñİ", - "ix o", - "Ġ( \"", - "ĠÑĤ ел", - "Ġìĸ´ëĸ ¤", - "ĠпеÑĢ ед", - "ĠD rive", - "ãĢ ij", - "ĠE qu", - "Ġequilib rium", - "Ġdescri bes", - "не е", - "4 2", - "ĠCur rent", - "y y", - "Ġabsor b", - "Ġsold ier", - "d ers", - "Ġtestim ony", - "Ġdec line", - "ľë ¡ľ", - "g age", - "Ġinsp ire", - "la pping", - "Ġspin ning", - "Ġsla very", - "Ġfac ial", - "Ġtrad itions", - "ári os", - "ĠHosp ital", - "Ġn est", - "ĠëĪ Ħ", - "Ġto i", - "Ġfe ars", - "ìħ ¨", - "ĠM uh", - "Ġgradu ation", - "Ġimpact ed", - "Ġa unt", - "ĠLet s", - "Ġalumin um", - "Ġdomin ant", - "ĠDav is", - "ĠNav y", - "Ġcom pt", - "op les", - "Ġest ava", - "è ¥", - "Ġsc al", - "Ġpres erve", - "ĠO pp", - "Ġpract ically", - "Ġmagn itude", - "Ġf itting", - "Ġcoordin ate", - "Ġfurn iture", - "ĠFam il", - "Ġexplos ion", - "Ġdocument ary", - "ĠS cript", - "Ġport ray", - "m at", - "Ġschedul ed", - "Ġdynam ics", - "ph y", - "ak y", - "ĠU I", - "C he", - "Ġcontinu ously", - "ĠPro v", - "å° ij", - "Ñĥ з", - "ra h", - "Ġger ne", - "pro of", - "Ġsecret ary", - "ĠPat reon", - "sc ream", - "ĠK ids", - "á»ĵ i", - "Ġk g", - "Ġuncertain ty", - "Ġк ажд", - "Ġmit ig", - "Ġread s", - "å· ²", - "ĠR u", - "Ġpri est", - "Ġн ед", - "Ġlimit ations", - "Ġflo at", - "6 00", - "ĠT oy", - "ĠJim my", - "Ġoff ensive", - "en i", - "ĠX i", - "Ġeye br", - "ĠTur k", - "Ġaccident ally", - "Ġoh ne", - "ĠS aud", - "9 5", - "ĠD utch", - "ан Ñģ", - "ĠSe attle", - "Ġëĵ ±", - "che ck", - "k ÄĻ", - "Ġcontrib utions", - "Ġbes ide", - "Ġqu indi", - "Ġfle w", - "æĹ ¶", - "Ø° ا", - "ĠL 
O", - "Ġwa ist", - "ĠE V", - "Ġhol idays", - "j on", - "Ġmis under", - "Ñı н", - "Ġb out", - "Ġd imin", - "Ạ½", - "ó l", - "ĠGr ace", - "Ġinput s", - "Ġden y", - "Ġform ing", - "ĠB ild", - "Ġad equ", - "Ġfol k", - "Ġreject ed", - "se mb", - "Ġfrust rated", - "op en", - "ĠBet ter", - "il on", - "Ġtow el", - "Ġdifferent ial", - "Ġsac red", - "Ġsa il", - "éĩ Į", - "ent imes", - "Ġgentle man", - "Ġicon ic", - "Ġcomp aring", - "Ġs agt", - "Ġtext s", - "Ġgrand ma", - "Ġroll s", - "Ġcont ents", - "ä¸į 好", - "оÑģ Ñģ", - "Ġsusp ension", - "ro it", - "¦ ¼", - "Ġasse z", - "Ġd ort", - "ĠM ath", - "ĠVict or", - "ĠJava Script", - "ä¸į å°į", - "Ġen han", - "Å Ļ", - "ĠB ush", - "Ġpromot ion", - "Ġk in", - "Ġmon sters", - "ĠColor ado", - "ĠÎ ²", - "íķ´ì ļĶ", - "æŃ £", - "iffer ent", - "Ġn aked", - "Ġpro d", - "et ics", - "ĠW oman", - "Ġtreat ments", - "Ġest oy", - "v é", - "Ġlif ting", - "Ġy apt", - "ĠRo ber", - "Ġì¹ ľ", - "Ġsubst itute", - "ak u", - "r idge", - "Ġê± °ë", - "Ġrespond ed", - "Ġb é", - "ĠEngine er", - "Ġtransfer red", - "ë ²", - "Ġha ber", - "o op", - "ĠW E", - "Ġv est", - "Ġfor ty", - "ĠD S", - "Ġ200 4", - "Ġco aching", - "n om", - "ĠB ab", - "Ġn ossa", - "ĠJ ake", - "Ġg y", - "Ġde leg", - "Ġìŀ ł", - "ĠкÑĢ аÑģ", - "Ġstand point", - "Ġdis ad", - "Ġart work", - "A d", - "ill o", - "ĠÄij ược", - "ĠPr om", - "ĠL ib", - "Ġcritic ism", - "Ġcontact s", - "ÑĢ ам", - "Ġachieve ment", - "ÐĶ а", - "Ġdiss ol", - "ĠVeg as", - "Ġstream s", - "ĠK ent", - "ĠعÙĦ Ùī", - "Ġrad ius", - "Ġsu cks", - "ĠA ch", - "Ġf i", - "ou st", - "ĠлÑİд и", - "Ġpal ette", - "ĠH az", - "ĠAnth ony", - "Ġtem a", - "ĠC os", - "Ġsa fer", - "α ÏĤ", - "Ġcont rad", - "Ġma ior", - "Ġinfl ation", - "ĠSil ver", - "Ġatt ending", - "íķľ íħĮ", - "art o", - "Ġapplaud ing", - "Ġcomput ing", - "ĠH at", - "æ »", - "k now", - "mak ers", - "Ġcon oc", - "Ġeduc ated", - "Ġmod ified", - "Ġinc lusion", - "ment al", - "ŀ IJ", - "is ia", - "ĠÏĢ οÏħ", - "Ġa un", - "ĠIre land", - "Ġk ö", - "Ġcompl iance", - "Ġinsp iring", - "иÑĤелÑĮ но", - "Ġdisp os", - "ì° ¨", - "Ġw ip", - "r ical", - "raw d", - "Ġt res", - "Ġmob il", - "olut ions", - "B O", - "Ġb ounce", - "Ġassum ed", - "ĠMed ical", - "Ġf iscal", - "Ġng Æ°á»Ŀi", - "ition ally", - "Ġst olen", - "ĠB M", - "Ġmechanism s", - "ε ί", - "Ġqual ified", - "Ġìŀ IJë", - "ught ers", - "ĠH IV", - "ĠL ots", - "Ġser vers", - "Ġcar r", - "ĠT ogether", - "Ġattract ed", - "Ġk r", - "æĪij æĺ¯", - "th ur", - "in in", - "ĠH alf", - "È Ľ", - "ĠP ap", - "Ġremind ed", - "AL L", - "Ġhel met", - "Ġbott les", - "Ġprofess ors", - "Ġse ine", - "ÅĤ Äħ", - "ãĥ ı", - "Ġê±° ìķ¼", - "Ġ×¢ ׾", - "f un", - "ĠB ird", - "Ġfight er", - "ĠëĶ °ë", - "ĠT ool", - "Ġt in", - "ino is", - "ë ¶Ħ", - "×Ļ× Ł", - "ĠC AR", - "åIJ į", - "irst y", - "Ġout door", - "ĠN S", - "ãħ İ", - "ff en", - "Ġl ud", - "H ello", - "Ġroll er", - "ie le", - "ĠPol and", - "Ġap a", - "ex p", - "Ġcertific ate", - "ĠT own", - "аÑİÑĤ ÑģÑı", - "ild e", - "Ġdeterm in", - "P R", - "Ġfree ze", - "Ġmain stream", - "Ġobject ives", - "b lo", - "Ġtak ie", - "åĵĪ åĵĪ", - "Ġë°Ķë ¡ľ", - "el et", - "ĠI V", - "ĠF ast", - "Ġd ere", - "em p", - "ĠD ra", - "ĠìŀĪ ìĹĪ", - "Ġdisc rimination", - "Ġε ίναι", - "ne cess", - "æ ®", - "ıģ ı", - "Ġpost ing", - "wi ÅĽcie", - "Ġl ub", - "Ġol ive", - "Ġr im", - "Ġmodel ing", - "Ġa ño", - "ĠPak istan", - "Ġover l", - "Ġinf lam", - "N E", - "ìĹIJ ê²Į", - "Ġatt ended", - "Ġdeal t", - "ĠAl t", - "ĠL incoln", - "Ġaw ake", - "Ġfil ters", - "ĠWith in", - "czy wiÅĽcie", - "Ġs û", - "ĠJohn ny", - "Ġintegr ity", - "Ġisol ation", - "ĠE asy", - "ĠпÑĢ ин", - "ĠAl 
ice", - "Ġsm iling", - "en ix", - ", ...", - "Î ¶", - "Ġbeg un", - "Ġjew el", - "Ġconvention al", - "Ġstat ist", - "Ġhand ed", - "Ġir re", - "Ġpro hib", - "Ġsatell ite", - "é¦ Ļ", - "ĠInd ust", - "Ġtra ged", - "Ġtra va", - "Ġih m", - "Ġcru el", - "ĠAg ora", - "ĠD oc", - "Ġz ones", - "Ġm all", - "Ġtr ay", - "×ķ× ł", - "Ġir rit", - "Ġk ans", - "ĠBe at", - "ud ge", - "ie lle", - "Ġtrust ed", - "Ġb ikes", - "ĠÑĥ п", - "ĠM ember", - "w ick", - "Ġcreat ors", - "Ġher itage", - "ind istinct", - "Ġres ur", - "enn en", - "C ome", - "Ġf iring", - "ĠBu eno", - "ĠТ о", - "ik an", - "ett es", - "Ġk es", - "Ġtri ps", - "Ġdivor ce", - "ĠK l", - "Ġcons ol", - "ke ep", - "기 ê°Ģ", - "ĠRep ort", - "Ġhost ing", - "Ġdiam ond", - "Ġcompl ic", - "Ġhel icop", - "Ġdep uis", - "d s", - "ĠCh an", - "Ñı л", - "Ġsc issors", - "il ation", - "Ġprop ortion", - "ER E", - "ĠÙĪ اÙĦ", - "int a", - "Ġmuch as", - "u ation", - "it is", - "æĬ Ĭ", - "Ñı Ñī", - "Ġni in", - "Ġemphas ize", - "uel a", - "Ġprodu cers", - "Ġr ze", - "änd er", - "ET H", - "æ º", - "Ġconst itu", - "åĽ ½", - "Ġperform ances", - "ist le", - "go v", - "ĠL iter", - "Ġincorpor ate", - "Ġeduc ate", - "ĠN in", - "ì ª½", - "Ùĩ Ùħ", - "el eration", - "×ķ× ij", - "Ġya ÅŁ", - "or ous", - "ĠC as", - "Ġgr ants", - "ëĬ ¥", - "am el", - "Ġê·¸ë łĩê²Į", - "ĠE ste", - "Ñħод иÑĤ", - "ĠпоÑģ ле", - "Ġg ent", - "Ġfocus es", - "al ities", - "ĠR h", - "ë ³´", - "æ° ij", - "ĠD ance", - "r r", - "Ġam er", - "Ġutil ize", - "Ġl ÃŃ", - "ĠAm ong", - "Ġpregn ancy", - "Ġlo ops", - "ал оÑģÑĮ", - "ĠM oh", - "Ġcatch ing", - "Ġglo b", - "Ġa jud", - "Ġ[ ?", - "ĠAn al", - "lo oking", - "Ġsurf aces", - "Ġprogress ive", - "Ġvir al", - "0 8", - "Î ¾", - "K A", - "Ġ ży", - "Ġpick s", - "ann on", - "Ġbul k", - "ĠR oss", - "Ġdescri bing", - "ĠG el", - "Ġloc ally", - "Ġend less", - "Ġmass age", - "Ġclean ed", - "Ġtravel ed", - "ен Ñĭ", - "Ġsent iment", - "ig ma", - "ĠN as", - "Ġchemical s", - "Ġright eous", - "ĠMag ic", - "Ġrel ates", - "Ġtruck s", - "Ġ19 60", - "åĪ ¥", - "Ġapp et", - "Ġsn acks", - "ĠSum mer", - "Ġy üz", - "Ġpr is", - "ĠMex ican", - "Ġtransp aren", - "Ġminor ity", - "Ġver te", - "Ġl assen", - "4 6", - "л ек", - "é p", - "ĠÑĦ илÑĮ", - "Ġi yi", - "Ġsp an", - "íķĺ ì§Ģ", - "Ġind icated", - "qu ar", - "Ġscholars hip", - "ĠLGB T", - "Ġhistor ically", - "ó ÅĤ", - "Ġmin ist", - "Ġpen et", - "ĠR ap", - "Ġcons ervation", - "çĽ ´", - "ĠH oney", - "ĠBe i", - "id el", - "Ġrespons ibilities", - "Ġmess y", - "ĠEx cept", - "OR E", - "Ġiniti atives", - "Ġjun ior", - "Ġdesign ers", - "Ġexpl oration", - "Ġspons or", - "Ġmob ility", - "Ġint eg", - "land o", - "Ġb ark", - "Ġindic ates", - "à ¶", - "Ġemploy er", - "å® ī", - "Ġcous in", - "Ġbo iling", - "Ġch rom", - "Ġç al", - "Ġper pet", - "Ġcont ained", - "Ġpark s", - "Ð «", - "ĠEngine ering", - "P lease", - "ĠStart ing", - "her o", - "Ġlaw yers", - "è¥ ¿", - "Ġz d", - "Ġfranch ise", - "ra ge", - "Ġint uit", - "ĠG L", - "re ach", - "ĠE lle", - "Ġnh Æ°", - "ĠN ord", - "Ġbe an", - "0 7", - "Ġple asant", - "å½ ĵ", - "v iron", - "Ġgrad ient", - "z us", - "ĠE M", - "Ġess ay", - "ìĹIJ ìļĶ", - "ế n", - "n u", - "á» «", - "ĠÃī s", - "Ġden omin", - "ĠGirl s", - "Ġperson nes", - "ĠاÙĦØ £", - "b ild", - "ĠSt at", - "Ġcompl iment", - "ĠK ate", - "Ġoptim al", - "Ġh id", - "د ÙĬ", - "Ġquick er", - "w all", - "E n", - "IN E", - "?? 
?", - "ì² ´", - "ĠA ction", - "å Ł", - "Ġpenal ty", - "ĠK az", - "' ?", - "Ġc ried", - "Ġcan vas", - "ft e", - "Ġexc lud", - "¸ë ¡ľ", - "Ġemphas is", - "Ġen zy", - "ĠH ou", - "Ġoverse as", - "ÃŃ amos", - "å¸ «", - "ö glich", - "Ġhead phones", - "c n", - "ĠA ge", - "Ġa kan", - "Ġcharacter istic", - "íķĺë ©´", - "get s", - "Ġë¶ Ī", - "Ġr ival", - "Ġb orders", - "em ente", - "em ás", - "Ġy ol", - "Ġcom pe", - "end ers", - "ınd an", - "Ġmö glich", - "Ġbubb les", - "nat ural", - "Ġar med", - "Ġel abor", - "ĠìĿ´ë ²Ī", - "Ġwash ed", - "οÏħ με", - "è« ĭ", - "Ġfl avors", - "Ġexist e", - "Ġpre st", - "ĠThe ma", - "оп ÑĢоÑģ", - "er on", - "U E", - "er i", - "Ġconc er", - "Ġa ixò", - "åħ ©", - "Ġprotect ive", - "Ġзна Ñİ", - "ĠëĤ ł", - "ĠII I", - "Ġme er", - "ĠSh op", - "ll i", - "ĠOr der", - "ĠM Y", - "ĠG host", - "ãĤĤ ãģĨ", - "ad el", - "Ġst ole", - "Ġrele asing", - "ĠCom ment", - "Ġtra ins", - "ë ªħ", - "Ġw issen", - "ens ed", - "Ġdesc end", - "Ġf ier", - "Ġrad i", - "Ġpers u", - "ç ¢", - "Ġм н", - "ĠD est", - "Ġwor ries", - "it et", - "b as", - "Ġst ab", - "n ame", - "or ic", - "ĠCl ose", - "Ġalum ni", - "ĠS elf", - "ff e", - "it ating", - "ather ine", - "ĠRight s", - "Ġell os", - "Ġwar rant", - "Ġn erve", - "Ġveget able", - "ĠTe il", - "Ġê°Ļ ìĿ´", - "R Y", - "Ġsustain ability", - "Ġste ht", - "Ġbr id", - "ada ÅŁ", - "Ġt v", - "Ġdur ation", - "Ġpesso a", - "Ġmet rics", - "Ġad am", - "c as", - "аÑĢ и", - "Ġev ident", - "Ġdisplay ed", - "Ø§Ø ¦", - "Ġre ck", - "ĠBudd ha", - "Ġde le", - "ĠDie go", - "os ph", - "Ġb la", - "ĠM ik", - "ul ator", - "Ġ200 1", - "Ġpromot ing", - "y ch", - "ĠE X", - "Ġlast ly", - "Ġout line", - "Ġspir its", - "Ġve ux", - "Ġsubt ract", - "ĠÅŁ imdi", - "Ġp ins", - "Ġbur ger", - "Ġmol to", - "Ġhab ÃŃa", - "Ġë° ĺ", - "ig u", - "er st", - "Ġn en", - "Ġbac on", - "it ious", - "Ġcar ries", - "Ġprom ises", - "nd e", - "ĠLe ft", - "ĠL im", - "æ £", - "Ġ4 4", - "Ġcare ers", - "Ġì£ ¼ë", - "Ġspeed s", - "qu é", - "m ad", - "mark et", - "is me", - "Ġ200 3", - "Ġre cess", - "ĠJ UD", - "Ġrac ist", - "ĠSch l", - "Ġpar ler", - "Ġot ros", - "ish es", - "Ġconvert ed", - "aa aa", - "ани и", - "ĠAr k", - "ĠCh ance", - "Ġelement ary", - "ε ν", - "ink s", - "Inter viewer", - "Ġfre ely", - "al ah", - "Ġëĭ¤ë ¥¸", - "Ġrequest ed", - "Ġtor que", - "no ÅĽci", - "ou red", - "ĠSt aff", - "Ġst ain", - "ĠAl an", - "Ġv ere", - "ĠW inter", - "Ġdef ect", - "ied y", - "Ġbe ats", - "Ġh á", - "um n", - "o ons", - "it udes", - "Ġse it", - "o ly", - "Ġres erv", - "Ġext r", - "Ġphys ician", - "vis or", - "Ġhand ful", - "ĠN ations", - "Ġì¢ĭ ìĿĢ", - "uc cess", - "Ġup stairs", - "ĠSqu are", - "Ġhe in", - "ĠSe ason", - "ol is", - "Ġpr ince", - "Ġdef ensive", - "ç ½", - "Ġм еÑģÑĤ", - "Ñĸ й", - "Ġا ÙĨ", - "um ble", - "ê¹Į ìļĶ", - "Ġass ass", - "Ġcirc ular", - "Ġqual ities", - "Ġh mm", - "Ġbl own", - "ĠL iz", - "ĠK ur", - "ĠS A", - "Ġfind ings", - "Ġcol ours", - "Ġde lle", - "ĠI R", - "ĠA th", - "ĠD ub", - "ĠO x", - "ĠØ ®", - "Ġpo ckets", - "Ġgr ill", - "Ġswitch ing", - "Ġprefer red", - "ĠW ales", - "Ġex emplo", - "Ġchop ped", - "Ġvacc ination", - "Ġne uro", - "Ġspec ify", - "iv os", - "Ġser á", - "Ġz ie", - "Ġà® ®", - "Ġresult ing", - "ĠU gh", - "Ġmess ed", - "C D", - "Ġpa ar", - "Ġcom er", - "Ġcou ch", - "ĠFest ival", - "Ġ4 9", - "v ous", - "z ens", - "ç¨ ®", - "ĠKenn edy", - "ĠT s", - "Ġë³´ì Ĺ", - "Ġdemonst ration", - "Ġun to", - "Ġfrust rating", - "Ġlabor atory", - "Ġe gy", - "Ġbeaut ifully", - "Ġìŀ ¬ë", - "Ġal gu", - "Ġö yle", - "ä½ł çľĭ", - "ĠP H", - "Ġfort une", - "Ġclean er", - "ĠRob in", - "Ġsa us", - "ĠG eld", - 
"Ġk at", - "o bs", - "Ġol ur", - "Ġm att", - "Ġquest a", - "Ġsuggest ion", - "en cer", - "о ÑģÑĤ", - "Ġrad ar", - "Ġìŀ ¡", - "ish a", - "à® ¨", - "ãĤĵ ãģª", - "j es", - "Ġve el", - "ìĤ °", - "Ġauth ors", - "ãĢ İ", - "pl an", - "Ġcollabor ative", - "Ġinst inct", - "Ġfar ming", - "au ge", - "E du", - "Ġmembers hip", - "Ġsimult aneously", - "Ġb ake", - "Ġk ä", - "Ġlect ures", - "Ñĩ еÑģ", - "Ġprend re", - "Ġcoll aps", - "ĠS aya", - "ĠF ut", - "Ġy og", - "ĠR ather", - "ر ÙĬ", - "Ġcamp s", - "ол од", - "Ġsim ulation", - "ĠM ak", - "La ughs", - "Ġgre y", - "Ġsent ences", - "y en", - "ĠUn less", - "J e", - "ĠSat an", - "ĠÑĤак же", - "ĠN A", - "Ġbr on", - "Ġ? ]", - "Ġsoul s", - "Ġlight ning", - "Ġimag ined", - "Ġczy li", - "ps ilon", - "et ta", - "Ġbelie ving", - "Ġstrong est", - "ĠC ON", - "Ġquel ques", - "Ġimmig rants", - "Ġwall et", - "éĢĻ æĺ¯", - "ĠJer sey", - "Ġimplic ations", - "Ġfor b", - "ãĢ ı", - "Ġun believable", - "Ø§Ø ¡", - "Ġoper ational", - "ü s", - "ĠG M", - "Ġê·¸ëŁ °ëį°", - "Ġgrac ias", - "Ġent end", - "ĠReg ard", - "ro b", - "ĠÑĤ еÑħ", - "è ı", - "ĠRev olution", - "Ġwa ar", - "ĠB iz", - "th eless", - "Ġspons ored", - "qu ier", - "ĠìĿ ¼ë", - "Ġte k", - "ĠëIJ ł", - "ig keit", - "ĠL uck", - "ĠCertain ly", - "Ġto ll", - "Ġн иÑĩего", - "ĠM oney", - "ĠÑģ ÑĤоÑĢ", - "ĠDou ble", - "ĠW olf", - "Ġch unk", - "ά ν", - "it és", - "on ing", - "M ar", - "Ġgrand es", - "Ġcollect ions", - "ĠEurop a", - "Ġа ÑĢ", - "ĠâĢĭâĢĭ âĢĭ", - "Ġê·¸ëŁ¬ë ©´", - "Ġоб ÑĬ", - "Ġãģ ª", - "Ġìĭľ ê°Ħ", - "ĠC ustom", - "Ġì² ĺ", - "Ñĸ лÑĮ", - "Ġindivid ually", - "í Ĺ", - "Ġdo zen", - "Ġo we", - "ĠVict oria", - "åı¯ èĥ½", - "Ġbe et", - "ur b", - "Ġanal og", - "i ção", - "Ĥ ľ", - "so ever", - "Ġmod o", - "Ġsubscri bed", - "ìŀ ¬", - "Ġent ities", - "çī ĩ", - "Ġclos et", - "Ġrespond ing", - "Ġprin ter", - "ĠStep han", - "Ġby ÅĤ", - "ĠD om", - "ĠF ern", - "ĠP ier", - "ĠwiÄĻ c", - "Ġh ence", - "Ġmod ules", - "ãĥ ¬", - "ĠëĶ ±", - "ĠDann y", - "ĠÑģеб е", - "Ġv ad", - "ĠìĹ Ħ", - "Ġs ous", - "Ġsp here", - "B Y", - "ĠP ed", - "ign ed", - "Ġwhe at", - "Ġund ers", - "Ġevol ve", - "Ġdec lar", - "Ġlight ly", - "Ġident ifying", - "æĦı æĢĿ", - "Ġlegend ary", - "Ġgen uine", - "Ġgr ind", - "ĠU ne", - "ge ben", - "Ġb icy", - "Ġjump s", - "Ġprov ince", - "zi ÄĻ", - "Ġ×IJ× ł×Ļ", - "Ġh oc", - "Ġб л", - "ĠGr ad", - "Ġreven ge", - "ĠاÙĦ ت", - "o oh", - "æĭ ľ", - "аÑĨи и", - "å¹ ³", - "Ġelect ro", - "ĠëIJ IJ", - "ãģ§ ãģ¯", - "Ġf als", - "ri el", - "ok er", - "ĠEx cellent", - "ĠMor gan", - "Ġbr ick", - "Ġsubstant ial", - "Ġpoll ution", - "ĠT ür", - "ĠEv et", - "Ġl ung", - "ãģ ĸ", - "×Ļ× ©", - "omm es", - "Ġreal izing", - "Ġhum ble", - "ĠL ock", - "Ġb od", - "Ġìĸ ¸", - "Ġpe ers", - "uz z", - "Ġembed ded", - "Ġclar o", - "Ġag greg", - "Ġemploy ers", - "ĠR aj", - "Ġãģ ¨", - "ĠY i", - "Ġje u", - "at ers", - "Ġstri kes", - "n os", - "aut res", - "d r", - "op her", - "ĠApp arently", - "íĺ Ħ", - "Ġinf ant", - "ا ب", - "ÑĤ Ñĭ", - "í Ľ", - "Ú ¯", - "Ġred es", - "acaÄŁ ım", - "ĠDA VID", - "ĠCh icken", - "Ġperspect ives", - "Ġview er", - "Ġsh ar", - "ĠпÑĢо из", - "lig t", - "er os", - "it able", - "ил оÑģÑĮ", - "Ġdif ÃŃ", - "´ë į°", - "Ġret ired", - "Ġthat s", - "zen ie", - "be iten", - "Ġmy cket", - "ĠR ab", - "Ġinflam m", - "ì° ®", - "Ġd um", - "Ġdad dy", - "æľ Ł", - "Ġimm ers", - "Ġplay list", - "௠Ĩ", - "Ġtra um", - "Ġref use", - "st ep", - "à® ļ", - "c up", - "Ġpop s", - "r imin", - "ay ım", - "Ġa ld", - "Ġun necess", - "Ġd ah", - "ĠIr ish", - "Ġcomp r", - "la ÅŁ", - "T P", - "Ġtransl ated", - "S c", - "ce ÄŁim", - "´ IJ", - "Ġd rei", - "ĠлÑİд ей", - "Ġqu 
iero", - "Ġhe le", - "z lich", - "Ġapp les", - "Ġdistrict s", - "Ġcred its", - "Ġas p", - "Ġëĭ ¨", - "or al", - "å½ ±", - "Ġste pping", - "ĠV a", - "Ġg ains", - "6 5", - "Ġnuest ra", - "ed ay", - "ass ador", - "ĠL ind", - "Ġcrop s", - "ci endo", - "ig ue", - "Ġb ana", - "A m", - "Ġp ent", - "Ġadd iction", - "Ġpack aging", - "ä d", - "ª ¨", - "Ġper què", - "Ġcampaign s", - "Ġste ep", - "Ġne ue", - "Ġembarrass ed", - "Ġdist inction", - "it zer", - "åij Ĭ", - "Ġregist ration", - "Ġll am", - "ĠAlm ighty", - "li est", - "Ġu z", - "n ak", - "ç º", - "Ġter az", - "iam ente", - "Ġtrans actions", - "Ġc ôt", - "Ġswitch ed", - "Ġcom bo", - "Ġpray ers", - "Ġintern ship", - "Ġaddress es", - "Ġchar ity", - "ĠW OO", - "Ġb ait", - "è¿ ĩ", - "Ġ �", - "Ġf ica", - "ĠTy ler", - "ar u", - "Ġat oms", - "ĠLe vel", - "ĠпоÑĤ ом", - "Ġf ame", - "ul k", - "Ġteach es", - "Ġre build", - "ед ÑĮ", - "ĠIndones ia", - "ush i", - "ĠSh ort", - "Ġens uring", - "f s", - "e le", - "Ġmargin al", - "Ġconclud e", - "am t", - "Ġver ify", - "ĠMc Donald", - "Ġsk al", - "Ġrec onst", - "ĠM ann", - "Ġbas ement", - "Ġtransform ed", - "Ġoccasion ally", - "z one", - "ĠD ans", - "Ġкак ой", - "Ġdiagn osed", - "ĠÏĦ α", - "Ġcomm ands", - "Ġpresident ial", - "Ġab b", - "Ġbrack et", - "ĠL em", - "Ã¥ ng", - "Ġfavor ites", - "Ġrev ol", - "ĠíĬ ¹", - "Ġhar ass", - "é ħ", - "Ġcle ans", - "st änd", - "Ġknock ed", - "Ġpe oples", - "Ġmusic ians", - "Ġmut ual", - "ĠC old", - "8 8", - "ze j", - "at ie", - "ĠHon or", - "Ġobs essed", - "ĠM USIC", - "ĠBre ak", - "ú ng", - "Ġmod ify", - "Ġs öyle", - "Ġ×ŀ ×Ķ", - "ĠOn line", - "f o", - "ĠMill er", - "Ġlik ing", - "Ġin hab", - "Ġgrat itude", - "ĠJour nal", - "arn ess", - "J ohn", - "ĠG it", - "åī Ľ", - "Ġsin cere", - "ĠS ci", - "ĠE li", - "Ġsymbol s", - "Ġman ually", - "ε ÏĤ", - "Ġв Ñĸд", - "ĠF at", - "Ġlab els", - "Ġsophistic ated", - "ump s", - "Ġrele ases", - "Ġ4 7", - "ĠO M", - "ê°Ģ ë", - "ĠB ien", - "ĠRe f", - "è¨ ĺ", - "ĠSt a", - "ĠE gg", - "Ġindic ator", - "ps on", - "Ġnas ıl", - "R ight", - "Ġcon vey", - "Ġkn ot", - "Ġconnect s", - "ul as", - "Ġpre ced", - "Ġine quality", - "am iento", - "Ġrep ly", - "O Y", - "Ġdism iss", - "ĠëIJ ľ", - "çĦ ¡", - "ĠÑħоÑĢоÑĪ о", - "Ġm éd", - "Ġrandom ly", - "ĠO nt", - "u ard", - "Ġpull s", - "ĠÑĤ епеÑĢÑĮ", - "ĠNe ed", - "ĠSo ft", - "Ġstrength s", - "Ġgo ed", - "um en", - "æŃ »", - "Ġíİ ¸", - "Ġд об", - "Ġclar ity", - "ĠA i", - "Ġball oon", - "ĠP and", - "ĠìķĦ ëĭ", - "Ġsh iny", - "Ġsmall est", - "on ia", - "h ill", - "ot ing", - "Ġe ing", - "Ġmere ly", - "Ġse us", - "Ġн еп", - "Ġí Ĩµ", - "Ġgu ides", - "Ġspecial ist", - "Ġste ak", - "ãĤĪ ãģĨ", - "Ġmig ration", - "que le", - "Ġru ined", - "Ġpu pp", - "å¥ ³", - "Ġk end", - "ang an", - "Ġpal m", - "Ġunf air", - "Ġz m", - "ĠD V", - "ch ester", - "и Ñİ", - "Ġo oh", - "er g", - "AT H", - "° ©", - "åĵ ª", - "r ison", - "Ġinvol ving", - "Ġpart ly", - "anç ais", - "Ġv ow", - "Ġprom inent", - "Ġcry st", - "ib a", - "Ġdes erves", - "Ġover t", - "Ġsens it", - "ĠWh e", - "Ġtight en", - "Ġintim id", - "Ġal iment", - "w ill", - "Ġstrength en", - "ĠT an", - "åı Ī", - "ãģĹ ãģ¾ãģĻ", - "on i", - "ĠM un", - "Ġpro ph", - "Ġrehe ars", - "ĠK le", - "Ġve ces", - "Ġwonder ed", - "ok i", - "Ġsens es", - "´ì ĭ", - "Æ°á» Ľ", - "ĠÈĻ i", - "Ġmuch os", - "Ġwatch es", - "ortun ate", - "ĠJ uan", - "ìŀĸ ìķĦ", - "ÑĢ е", - "e i", - "ion en", - "Ġexperiment al", - "Ġda ughters", - "ภĽ", - "Ġment ally", - "bec ca", - "aw are", - "ìĦ Ŀ", - "Ġwhat soever", - "Ġen ables", - "ĠL ow", - "o id", - "ภĬ", - "ó d", - "Ø º", - "Ġconstruct ed", - "ĠLad ies", - "Ġaccus 
ed", - "Ġа н", - "D an", - "Ġsp awn", - "Ġcontain ers", - "Ġart istic", - "ı p", - "Ġdisc l", - "Ġaut res", - "in as", - "ĠN ation", - "Ġn ag", - "be an", - "w he", - "ľë ıĦ", - "ĠSe oul", - "Ġíı ¬", - "ĠN ich", - "Ġcomp lement", - "Ġinter ven", - "ĠMod el", - "ĠOr ange", - "nam on", - "Ġcalcul ation", - "se e", - "Ġusted es", - "Ġle b", - "Ġdo ct", - "Ñĸ н", - "Ġf oster", - "Ġel astic", - "ĠAh h", - "Ġa ce", - "ĠP ink", - "ĠJ eg", - "Ġde er", - "ãģĹ ãģĦ", - "s is", - "Ġjak o", - "ĠEm ma", - "ÑģÑĤв енно", - "Ġport rait", - "Ġmak er", - "Ġa ument", - "ÑĢ об", - "Ġairpl ane", - "Ġtransparen cy", - "Ġadjust ment", - "ĠCD C", - "ç on", - "Ġupload ed", - "Ġд ейÑģÑĤв", - "Ġго ÑĤов", - "Ġit er", - "Ġcur se", - "ô n", - "mer ce", - "ar an", - "Ġle ak", - "çµ IJ", - "Ġabs ence", - "Ñģ кий", - "Ġread ers", - "al er", - "Ġbene ath", - "ang o", - "h etic", - "Ġfin ns", - "Ġpo op", - "Ġdu plic", - "H i", - "ig s", - "olog ically", - "op p", - "Ġd izer", - "ĠAll en", - "Ġgl i", - "Ġacc eleration", - "Ġvit amin", - "ãĥ Ń", - "v ä", - "ĠAc cess", - "à® Ļ", - "r ás", - "Ġappreci ated", - "Ġn ah", - "Ġpos ter", - "Ġt ale", - "Ġhighlight ed", - "æĸ ĩ", - "ż eli", - "Ġblock chain", - "Ġmic row", - "Ġcin ema", - "ĠCh ang", - "ĠSe arch", - "ust ers", - "ĠZ ero", - "ĠDiv ision", - "ÑĢ аÑģ", - "Ġsca re", - "Ġj elly", - "ĠAdminist ration", - "S O", - "Ġl ined", - "Ġê° Ħ", - "Ġge ben", - "Ġso da", - "Ġwin ners", - "³ ¼", - "Ù Ĵ", - "ĠAm b", - "åķı é¡Į", - "å Ķ", - "Ġpe g", - "å· ±", - "4 3", - "Ġra us", - "Ġre wards", - "Ġinc lus", - "Ġhigh way", - "Ġha h", - "Ġmultipl ied", - "Ġs ẽ", - "Ġdisci ples", - "Ġn ing", - "Ġdress ing", - "Ġattrib utes", - "ĠM osc", - "ĠGree ce", - "Ġse k", - "ĠLe arn", - "Ġj us", - "rend re", - "Ġperson ne", - "pl ete", - "Ġpl acing", - "Ġl uego", - "ill ance", - "Ġоб Ñī", - "Ġprov ision", - "Ġl ion", - "t ra", - "bo ards", - "Ġbehavi our", - "he y", - "Ġsubscri ption", - "Ġprot agon", - "ãĥ £", - "Ġvar a", - "ĠÅŁ u", - "Ġha ha", - "Ġteas poon", - "æ Ł", - "av oir", - "Ġcrypt o", - "ĠÑģÑĤ аÑĢ", - "ĠSt ore", - "ab s", - "ĠStud ents", - "Ġla und", - "int o", - "Ġapproach ed", - "° ľ", - "ÑĥÑİ Ñī", - "ĠL abor", - "ot es", - "iat ric", - "Ġgro ÃŁ", - "ut ive", - "Ġи д", - "ĠG ib", - "Ġpl acement", - "ĠdifÃŃ cil", - "Ġf rog", - "ĠвÑģе Ñħ", - "ĠJ r", - "az ed", - "Ñĥ Ñī", - "Ġê ¼", - "fr ame", - "а еÑĪÑĮ", - "Ġlock down", - "åij ³", - "Ġmed i", - "Ġ×Ķ× ŀ×", - "ени й", - "em ale", - "ì¢ ħ", - "ater al", - "Ġdist ant", - "Ġbe ars", - "Ġjournal ist", - "è§ £", - "ĠMarsh all", - "ĠIh nen", - "uet ooth", - "b ag", - "ĠÄij ã", - "ĠHigh ness", - "Ġì° į", - "и ка", - "ĠW u", - "ĠFr an", - "Ġp eng", - "Ġf on", - "Ġhypothes is", - "ĠÑĢ Ñĥ", - "Ġl y", - "× ļ", - "ìĽ Ķ", - "ĠRad io", - "ภŀ", - "D av", - "Ġembarrass ing", - "ĠìŀĪ ìĸ´", - "Ġcast ing", - "Ġc age", - "ĠP sych", - "ĠìĿ¼ ëĭ¨", - "ĠÅ ¾", - "im b", - "Ġdirect ors", - "S H", - "ĠÏĦη ν", - "á»ģ u", - "Ġkon uÅŁ", - "Ġoption al", - "quar ters", - "ik er", - "ĠS ant", - "Ġvers es", - "ë ¶Ģ", - "Ġo lar", - "ĠÏ ĩ", - "ãĥ ķ", - "Ġγ ια", - "ĠI mm", - "Ġcontrovers ial", - "Ġer sten", - "Ġreci p", - "ĠChristian ity", - "Ġê´ ľ", - "ord on", - "×ķ× ©", - "Ġsl ash", - "ĠP f", - "Ñĥд ÑĮ", - "×ķ× Ŀ", - "ĠPer ry", - "Ġm amy", - "Ġbackground s", - "Ġà®İ ன", - "Ġpend ant", - "ĠColumb ia", - "Ġin verse", - "ĠÑĩеÑĢ ез", - "Ġs v", - "Ġdig ging", - "4 1", - "ch em", - "Ġnavig ation", - "ĠSh in", - "ĠFr ont", - "P D", - "Ġbe aring", - "ĠW asser", - "Ġw ax", - "ĠCH RIS", - "ch ing", - "Ġpress ed", - "E l", - "ĠD al", - "ons in", - "Ġb inding", - "Ñģк ой", - "po ons", - 
"Ġmo ck", - "are st", - "к ÑĢа", - "M M", - "Ġcor rupt", - "st orm", - "Ġref res", - "ĠCo ach", - "ll ä", - "ĠTH IS", - "Ġpar ag", - "Ġìĵ °", - "p ool", - "Ġbill ions", - "Ġê¹ Ģ", - "gr oup", - "Ġwel coming", - "cell ence", - "ĠDu ke", - "ê¸ ´", - "Ġprim era", - "ìł ¸", - "Ġp ond", - "Ġstat ue", - "Ġêµ ¬ë", - "Ġh atch", - "Ġinstrument al", - "Ġresident ial", - "ì» ¤", - "Ġaccept ing", - "osh i", - "d ate", - "ĠìĶ ¨", - "Ġplant ed", - "Ġj oking", - "Ġì Ħľ", - "Ġh ated", - "ĠÑĢаÑģ Ñģк", - "Ġsle pt", - "Ġpack ages", - "Ġisland s", - "es en", - "ÄŁ ı", - "Ġdi agon", - "ĠO sc", - "Ġmes h", - "Ġsc ales", - "ar ity", - "ĠDef ense", - "ãģ¡ ãĤĩ", - "ĠLew is", - "ĠÑģ егоднÑı", - "Ġfl ies", - "uin ely", - "ĠCons ider", - "Ġst ark", - "he w", - "ĠAs ÃŃ", - "³ ´ë", - "Ġprop ose", - "Ġíķĺë ©´", - "od o", - "ĠNorm ally", - "Ġhe eft", - "ĠHarr is", - "g ro", - "ĠBlo od", - "b ase", - "Ġi OS", - "Ġtouch es", - "Ġinsp ir", - "Ġ× ĵ", - "Ġb inary", - "Ġì¶ Ķ", - "Ġser ial", - "Ġ ion", - "Ġunemploy ment", - "Ġodd s", - "ĠF ab", - "ĠF BI", - "BR UN", - "Ġweight s", - "ν ο", - "at ile", - "Ġnurs es", - "Ġinvolve ment", - "ĠíĶ ¼", - "Ġgovern ance", - "Ġâ Ĥ¬", - "ÑĢÑĥ п", - "ier ra", - "íĺ ķ", - "ĠJ erry", - "Ġbe ard", - "Ġsal vation", - "ĠAl ong", - "g entle", - "ĠK i", - "b ol", - "ĠPl at", - "Ġhas ht", - "è¿ ij", - "Ġw are", - "Ġpart ie", - "y cz", - "Ġint r", - "F ih", - "n ent", - "Ġche at", - "il en", - "Ġë ¯", - "or ie", - "Ġfá cil", - "et ric", - "Ġaffect ing", - "unci ation", - "Ġaff airs", - "Ġbe e", - "Ġview ing", - "Ġor ang", - "ĠL an", - "ĠС ÑĤ", - "ä¸ ĸ", - "ĠM es", - "ĥ ģ", - "er ie", - "Ġes pa", - "Ġinter pre", - "Ġposs ess", - "Ġpure ly", - "rit o", - "f ound", - "as ma", - "ìłģ ìĿ¸", - "Ġexam ine", - "ĠÑĥ м", - "Ġbes ch", - "ĠTom orrow", - "ĠB lock", - "Ġvari ant", - "Ġprefer ence", - "Ġcoach es", - "Ġmedic ations", - "Ġíĺ Ħ", - "Ġemp ire", - "ë Ħ¤", - "ĠIll inois", - "Ġcris py", - "Ġth ì", - "Ġbe es", - "7 7", - "Ġgl ow", - "è º", - "ĠStud ies", - "åIJ Ħ", - "ĠChall enge", - "Ġunlike ly", - "Ð §", - "ıy orsun", - "DI E", - "Ġminim ize", - "iz ard", - "Ġú n", - "Ġencont rar", - "ĠK ill", - "å »", - "Ġvan illa", - "ĠGr ant", - "ĠG T", - "se a", - "Ġs ought", - "в од", - "Ġnä m", - "ĠA unt", - "OW N", - "Ġpump kin", - "st ellen", - "Ġr ag", - "ег да", - "Ġstory t", - "Ġfor um", - "æ© Ł", - "Ġestab a", - "uch e", - "Ġcon gress", - "ĠRe y", - "Ġdram atically", - "ĠSp ort", - "ĠYe llow", - "Ġê³Ħ ìĨį", - "Ġdisg usting", - "ĠRe cent", - "Ġacqu ired", - "Ġc ables", - "çĶ ļ", - "d in", - "Ġv isto", - "Ġcommunic ating", - "ÑģÑĤав лÑı", - "еÑģ ÑĤо", - "ãĥ»ãĥ» ãĥ»", - "Ġré g", - "Ġso cks", - "Ġpro ces", - "be cause", - "Ġut ter", - "Ġcoloc ar", - "Ġnew est", - "Ġgr amm", - "è¡ ¨", - "ä¸į çŁ¥éģĵ", - "Ġsh ifting", - "Ġcar rier", - "ĠÑģк оÑĢ", - "ĠSch w", - "Ġexec uted", - "Ġmaint ained", - "ĠÏ Ĩ", - "ĠM oses", - "Ġdis se", - "Ġhor r", - "ãĢ ľ", - "Ġr ally", - "Ġall em", - "ĠEvent ually", - "Ġdi yor", - "lv ania", - "Ġsch nell", - "Ġê³ ¼", - "Ġë§ ¤", - "Ġstrugg les", - "l ate", - "Ġclar ify", - "é ment", - "Ġmulti plic", - "иб о", - "Ġjour n", - "Ġfra gr", - "Ġsurprising ly", - "Ġdesper ate", - "5 2", - "Ġs ul", - "ĠRe ad", - "ĠF ried", - "Ġm ond", - "w oo", - "Ġorgan izing", - "ãģĹãĤĩ ãģĨ", - "ĠSo on", - "Ġв опÑĢоÑģ", - "ĠN ur", - "ĠÐĹ Ð´", - "Ġsp ider", - "е ÑģÑı", - "Ġtutorial s", - "Ġnutri ents", - "or er", - "Ġcoe fficient", - "Ġarrange ment", - "Ġpr icing", - "n an", - "y u", - "B L", - "Ġtri be", - "ĠHow ard", - "un ks", - "Ġnew er", - "Ġprov in", - "Ġpred iction", - "h os", - "Ġol sun", - "ĠAr ound", - 
"Ġv ier", - "ĠÑģÑĤоÑĢ он", - "Ġv alley", - "ĠE la", - "if i", - "Ġgal axy", - "Ġtran qu", - "Ġad vers", - "ĠTem ple", - "iff s", - "ig ence", - "èĩª å·±", - "Ġkön nte", - "ĠÄij ó", - "D id", - "Ġphotograph s", - "ĠA WS", - "ÑĨи Ñı", - "Ġgu ards", - "Ġappoint ed", - "ĠG il", - "Ġм ом", - "Ġc od", - "ĠUn like", - "Ġeven ly", - "isc onsin", - "Ġest ou", - "Ġm nie", - "ĠEx ec", - "ĠM V", - "ĠE ine", - "ä¿ ¡", - "ĠRog er", - "ĠF ac", - "ĠL ist", - "Ġf uer", - "аеÑĤ е", - "om ed", - "Ġattract ion", - "èī ²", - "Ġter rain", - "ĠD rop", - "Ġcorpor ations", - "Ġsci ences", - "Ġthr one", - "ãģĦ ãģŁ", - "Ġa j", - "ĠR ot", - "çī ¹", - "Ġsupp orters", - "ĠB ere", - "H ere", - "Ġdifer entes", - "Ġsignific ance", - "Ïĥ η", - "æĪij 覺å¾Ĺ", - "Ġcl amp", - "Ġë ĮĢë", - "Ġfab ulous", - "re z", - "æĮ ģ", - "Ġassum ptions", - "ut her", - "w id", - "p ot", - "è¿ İ", - "Ġy an", - "ul in", - "ÑĢ Ñĭв", - "ĠSl ow", - "ĠPenn sy", - "Ġíķ ´ìĦľ", - "Ġme io", - "Ġwealth y", - "ĠE ight", - "Ġpul se", - "Ġfr iction", - "id ity", - "ĠH oll", - "i yorum", - "Ġsound ed", - "ĠC arr", - "Ġfor k", - "â ĺ", - "ĠP A", - "Ġcons pir", - "Ġc oding", - "r t", - "ĠTy p", - "Ġìĸ ij", - "Ġп ог", - "Ġmis er", - "ĠÑģм оÑĤÑĢ", - "ĠSw eden", - "Ġolar ak", - "ĠZh ang", - "ĠCh i", - "ĠT itan", - "Ġscreen ing", - "ĠSp ider", - "ĠÅŀ imdi", - "Ġobst acles", - "lar a", - "Ġchalleng ed", - "p se", - "T ON", - "á» ¥", - "ĠP i", - "Ġlag i", - "ie urs", - "Ġhur ting", - "Ġneg lect", - "Ġgener ating", - "Ġyoung est", - "Ġaud it", - "ĠÑĢ ез", - "Ïģ ά", - "Ġdon ate", - "ĠPD F", - "Ġvis its", - "Ġcru ise", - "P P", - "as er", - "Ġw sp", - "back s", - "iv als", - "ãģĨ ãĤĵ", - "Ġde ve", - "Ġprop ort", - "Ġc ath", - "ĠE ffect", - "Ġwind s", - "ĠìĻ Ķ", - "Ġchart s", - "Ġs ama", - "Ġautom ation", - "Ġпок а", - "Ġol an", - "Ġbo ats", - "Ġca fe", - "Ġden ied", - "ĠM ama", - "Ġblock ing", - "ĠTh or", - "Ġphenomen al", - "Ġstake holders", - "Ġun os", - "Ñĥ еÑĤ", - "ĠAb raham", - "ãģ§ ãĤĤ", - "Ġdetect ion", - "Ġjur is", - "Ġpower ed", - "z ial", - "Ġwel fare", - "Ġup grad", - "Ġmoż na", - "ĠC ase", - "c ular", - "Ķ ìĿ´", - "ãĥ ģ", - "ĠGu ess", - "Ġcy cles", - "ä¾ ĭ", - "çµ ¦", - "ro ck", - "um i", - "Ġel ite", - "Ġqu è", - "åł ±", - "ÑĤ ом", - "Ġsh ore", - "gun ta", - "Ġk u", - "Ġfaith ful", - "ĠJ eremy", - "a id", - "à ·", - "ug al", - "å°į åķĬ", - "ĠV el", - "Ġvra i", - "st ell", - "¨ ¸", - "Ġk ol", - "è ½", - "Ġquant o", - "Ġз аÑĢ", - "Ġ200 2", - "es y", - "Ġres erve", - "Ġмом енÑĤ", - "Ġdeploy ed", - "Ġdefin ing", - "Ġsa u", - "Ġga at", - "\" )", - "Ġtrans mit", - "Ġpubl ishing", - "Ġrank ing", - "Ġoff ense", - "Ġ4 6", - "p in", - "ĠT aking", - "Ġentit led", - "Ġgen uinely", - "Ġvari ations", - "Ġfind e", - "Ġt au", - "Ġunf ortunate", - "ĠR ah", - "port s", - "Ġc Å", - "Ġmon key", - "Ġbr ac", - "we i", - "l ung", - "Ġart if", - "Ġsy rup", - "ĠÐĶ ав", - "Ġlift ed", - "Ġche z", - "ĠAd vent", - "ĠSt ock", - "Ġdo l", - "м ен", - "иÑĪ ÑĮ", - "Ġy n", - "g io", - "d et", - "Ġdes se", - "Ġg ri", - "ĠChair man", - "ç ħ", - "Ġcu enta", - "an im", - "Ġcra b", - "Ġesc al", - "Ġpremi ère", - "ĠGe f", - "Ġd ining", - "Ġsevent h", - "Ġch asing", - "ĠT ower", - "Ġbrut al", - "Ġfundament ally", - "ãģ¨ ãģĨ", - "л ениÑı", - "st age", - "Ġacqu is", - "Ġcyl inder", - "Ġcomm ander", - "m em", - "ĠU V", - "ha ppy", - "Ġe psilon", - "Ġinv itation", - "Ġfar mer", - "ch air", - "Ġdest iny", - "Ġso vere", - "ĠHeb rew", - "Ġserv ant", - "Ġbe w", - "Ġg ast", - "ut ies", - "Ġadministr ative", - "ĠComm and", - "é ta", - "Ġnit rogen", - "ê· ¼", - "Ġab i", - "Ġvill ain", - "Ġblank et", - "ĠS 
end", - "Ġbeat en", - "² Ħ", - "Ġvol unt", - "Ġschol ar", - "ĠEm peror", - "Ġ4 3", - "v able", - "ĠD us", - "ĠG U", - "Ġtarget ing", - "ww w", - "Ġamend ment", - "ìĨ Įë", - "Ġt ing", - "Ġn asty", - "Ġg auge", - "ĠÑĢ од", - "ĠH ans", - "Y our", - "α ν", - "Ġpro jet", - "ĠHawai i", - "Ġsusp icious", - "Ġsch w", - "Ġremo val", - "Ġint rig", - "ĠM U", - "Ġp onto", - "ठ¾", - "Ġоб ÑĢаз", - "Ġguess ing", - "p ace", - "Ġm others", - "Ġmill imeter", - "л ение", - "没 æľī", - "Ġavail ability", - "ic z", - "æŃ ¤", - "Ġfr act", - "Ġbas es", - "k m", - "ĠB TS", - "ĠF ield", - "Ġd zie", - "Ġseg undo", - "ĠëĤĺ ëĬĶ", - "Ġlegit imate", - "im as", - "Ġв н", - "Ġcor ruption", - "Ġsm ash", - "ĠVal ent", - "Ġalign ed", - "ĠPennsy lvania", - "Ġg ab", - "ĠE un", - "ent h", - "ĠMor ning", - "Ġcand le", - "Ġback pack", - "ĠIslam ic", - "a ções", - "Ġenc ry", - "Ġmushroom s", - "íĮ Į", - "d it", - "Ġtrans it", - "ĠW isconsin", - "Ġparticip ated", - "ĠIl s", - "Ġunf old", - "¶ Ģë", - "Ġprof its", - "Ġwar ming", - "ĠG ang", - "Ġnetwork ing", - "Ġme ga", - "Ġthorough ly", - "le ments", - "ĠH m", - "Ġdec iding", - "Ġemotion ally", - "Ġexha usted", - "ĠÐŁ оÑĤ", - "c ido", - "ĠHT ML", - "Ġcopy right", - "Ġmel ody", - "y im", - "Ġand ers", - "osh op", - "Ġë³ ¼", - "Ġathlet e", - "ĠG E", - "Ġfrequ ent", - "Ġdes ires", - "Ġneed ing", - "ĠY un", - "Ġrif le", - "Ġlo ver", - "' T", - "Ġd ense", - "Ġt ão", - "Ġnot ified", - "Ġid i", - "ìĹ Ń", - "í Ĩ", - "Ġinteract ing", - "Ġrapp ort", - "еÑĢ и", - "s ki", - "Ġb esser", - "Ġmanufact urer", - "ĠK yle", - "Ġaccount able", - "ĠS ak", - "ĠP il", - "ĠD omin", - "Ġpres um", - "ĠÐĴÑģ е", - "Ġvine gar", - "Ġguarante ed", - "çľĭ åĪ°", - "Ġhand led", - "éŁ ³", - "c at", - "Ġcivil ization", - "Ġaccom p", - "ĠV M", - "é mon", - "Ġde ze", - "Ġgrad es", - "Ġsoll te", - "Ġst aring", - "×IJ× ª", - "ar nt", - "Ġhoriz on", - "Ġtrav ail", - "h our", - "第 ä¸Ģ", - "ĠE D", - "ĠD ak", - "Ġn y", - "Ġcon ve", - "ĠCh am", - "Ġfir ms", - "ĠL iu", - "ĠÑģÑĤ ÑĢан", - "Ġli bert", - "Ġlens es", - "Ġint ake", - "ĠвÑĭ б", - "Ġmens en", - "h el", - "Ġpract ition", - "Ġ3 50", - "ãĤ ³", - "F O", - "Ġbed s", - "Ġancest ors", - "ĠìĹĦ ì²Ń", - "Ġdistur b", - "ĠLast ly", - "ĠSupp ort", - "ี à¹ī", - "ĠCor ona", - "Ġenthus i", - "Ġвоз м", - "ĠìĤ¬ëŀ Įë", - "Ġ5 2", - "b ird", - "Ġredu ces", - "ĠìŀĪ ìĿĦ", - "ĠG ene", - "êµ IJ", - "ÄĻ p", - "ĠÃľ ber", - "Ġconcer ning", - "us er", - "Ġconcent rate", - "ĠWH AT", - "ish op", - "onym ous", - "no ld", - "Ġsuggest ing", - "© °", - "ĠF ish", - ".... 
....", - "Ġvess el", - "Ġtrabaj o", - "ãģ µ", - "ĠO cean", - "å§ IJ", - "y g", - "Ġtown s", - "d el", - "Ġterr ifying", - "Ġçal Ä±ÅŁ", - "Ġs ino", - "Ġe ats", - "Ġge z", - "Ġg eme", - "ĠìĻ Ħ", - "Ġcomp art", - "Ġimplement ing", - "ĠPot ter", - "ĠGerm ans", - "Ġg ÅĤ", - "Ġt ennis", - "Ġcar pet", - "au er", - "ĠSaud i", - "ye ong", - "Ġcur ry", - "ĠFore st", - "Ñĭ л", - "Ġfif teen", - "Ġbol ts", - "Ġ{ \\", - "¬ ´", - "Ġsett lement", - "Ġl ange", - "Ġb am", - "G et", - "íķ Ļ", - "Ġsw ap", - "ĠK han", - "Ġcomm ence", - "Ġquar antine", - "Ġsc ored", - "ç ĸ", - "Ġ19 50", - "Ġthick er", - "Ġsû r", - "åı £", - "ĠLar ry", - "Ġall ez", - "ìĭľ ëĬĶ", - "Ġg ü", - "Ġspect acular", - "/ /", - "b oth", - "Ġst ats", - "å¦ ³", - "ĠN ancy", - "Ġbun u", - "Ġcr ust", - "Ġactiv ated", - "Ġê·¸ë ŀ", - "out he", - "Ġport s", - "Ġne ural", - "Ġj aw", - "Ġobserv ations", - "Ġvo it", - "ab an", - "ả i", - "¦¬ë ¥¼", - "om es", - "௠ĭ", - "qu i", - "Ġkind ness", - "Ð ij", - "Ġ4 1", - "Ġmoder ate", - "Ġang els", - "ĠT amb", - "è t", - "Ġch lor", - "ĠBill y", - "ì² ĺë", - "ac on", - "Ġselect ing", - "ĠDel ta", - "Ġn ull", - "den ly", - "Ġci ud", - "Ġtend ency", - "Ġbreak down", - "Ġm int", - "ÑĦ оÑĢм", - "or ph", - "Ġda wn", - "s pr", - "ĠW ILL", - "äch lich", - "Ġpu ppy", - "7 00", - "Ġà® ¤", - "Ġfail s", - "ĠCon c", - "Ġrel atives", - "Ġinv iting", - "Ġaut onom", - "Ġcomp osed", - "Ġun ity", - "Ġdec is", - "Ġaccess ories", - "ĠC ass", - "Ġb ist", - "ĠT ip", - "ì§ ¸", - "Ġp unt", - "Ġr áp", - "éĢ ²", - "AN K", - "ãģ ļ", - "ex ist", - "Ġcompat ible", - "Ġn er", - "Ġе мÑĥ", - "Ġa plic", - "Ġb apt", - "Ġfail ing", - "ĠTam am", - "Ġos cill", - "Ġletz ten", - "Ġrepeated ly", - "Ġjung le", - "ĠP ush", - "h ai", - "ĠÎ ·", - "Ġdead ly", - "Ñı ж", - "wi Äħ", - "ĠComm on", - "ĠÎ ķ", - "Ġsk ate", - "T C", - "ĠMin i", - "Ġhob by", - "ầ n", - "Ġrout es", - "Ġam igos", - "Ġcon jun", - "Ġpartners hips", - "Ġno vo", - "Ġa ver", - "Ġpou vez", - "br idge", - "Ġpre oc", - "h im", - "Ġtur b", - "Ġso b", - "ĠSn ap", - "Ġì° ¸", - "min ute", - "Ġtra ject", - "uj ÄĻ", - "Ġe ager", - "Ġregul atory", - "Ġbank ing", - "b ling", - "ÑĪ ÑĮ", - "a ż", - "Ġbiz arre", - "it ated", - "d ire", - "Ġthreat ened", - "Ġsh ining", - "Ġn esse", - "Ġcor ps", - "ĠÑģ Ñĥ", - "Ġt eles", - "Ġtem p", - "t em", - "Ġк ан", - "Ġfe ver", - "N ew", - "Ġheav ier", - "ĠS ah", - "b ud", - "Ġout ros", - "Ġì° ¾", - "Ġëª ħ", - "arr ing", - "Ġê´ľ ì°®", - "ĠN ap", - "Ġse min", - "ĠTh an", - "if s", - "Ġdes en", - "ĠÑĤак ое", - "Ġlos es", - "ĠB alt", - "k on", - "Ġнап ÑĢ", - "Ġvo is", - "ĠMosc ow", - "Ġch airs", - "h is", - "Ġrefuge es", - "k g", - "Ġk ole", - "į ¨", - "аÑģ ибо", - "¦ ½", - "ĠUn iverse", - "ĠDire ct", - "Ġche ating", - "ĠC in", - "Ġpat ri", - "Ġadv ise", - "ĠN ether", - "Ġprime iro", - "Ġmention ing", - "n ut", - "5 6", - "ar ı", - "Ġpet ite", - "b led", - "Ġpens ar", - "ic io", - "IN D", - "Ġveter an", - "Ġlad der", - "Ġconsequ ence", - "ож ал", - "ĠB urn", - "Ġr ug", - "ĠM ade", - "Ġg it", - "\" ...", - "Ġcompet itors", - "Ġprz ed", - "Ġapp arent", - "ĠArgent ina", - "ĠWork ing", - "Ġcollabor ate", - "w oman", - "Ġret ain", - "Ġle urs", - "Ġdash board", - "×Ļ× ĵ", - "ĠEar ly", - "B M", - "Ġе Ñij", - "ол ог", - "Ġsatisf ying", - "Ġoft entimes", - "Ġma pping", - "ünk ü", - "ar th", - "f old", - "Ġlaunch ing", - "Ġa ura", - "Ġprec ision", - "work s", - "G od", - "Ġstra p", - "ĠIm per", - "Ġr ivers", - "Ġ |", - "Ġcu er", - "reg on", - "Ġarri val", - "ка Ñħ", - "ĠM iami", - "ан Ñĭ", - "Ġsurviv ors", - "ĠSen ior", - "Dav id", - "Ġest ado", - "Ġse ctors", - "Ġpop ping", 
- "Ġch im", - "ay ı", - "Ġkun nen", - "Ġgall ery", - "Ġsun light", - "ese hen", - "Ġye lling", - "ĠMe in", - "ĠPho enix", - "Ġman o", - "Ġhistor ia", - "Ġoccur ring", - "æ¬ ¸", - "ì ¸", - "ад и", - "å¾ ħ", - "Ġinstitution al", - "ĠT ut", - "ç ²", - "Ġsl aves", - "ãģ© ãģĨ", - "Ġforg iveness", - "Ġtw in", - "ĠHy un", - "н ÑĮ", - "ĠK omm", - "and ra", - "sh ot", - "ss ä", - "ĠÑĨ е", - "at ta", - "Ġexp ense", - "ĠG PU", - "ĠP ast", - "rib ly", - "ĠëŃIJ ìķ¼", - "Ġгод а", - "Ġresp ir", - "æĿ ±", - "ĠQue ens", - "h ops", - "Ġs érie", - "Ġpre f", - "Ġcom ed", - "Ġpl ut", - "ĠOver all", - "Ġãģ Ŀ", - "Ġc ush", - "Ġring ing", - "Ġincor rect", - "ĠÑģÑĤ ÑĢ", - "Ġgeomet ry", - "Ġadvert is", - "ĠÐ ¨", - "Ġreview ed", - "ãģĤ ãģĤ", - "Ġdo zens", - "Ġdeterm ination", - "ĠPh ill", - "Ġcontrib uted", - "ĠC it", - "Ġpass engers", - "Ġcôt é", - "Ġre ver", - "Ġtechn ological", - "Ġall en", - "Ġr aining", - "av i", - "Ġsal ty", - "Ġtyp ing", - "ĠÑĤ е", - "Ġt ilt", - "Ġì¹ ĺ", - "Ġо ÑĢ", - "ĠпÑĢ Ñıм", - "Ġr ou", - "Ġare na", - "ar at", - "åĪ «", - "HH HH", - "Ġmanufact urers", - "ĠEd ward", - "Ġt uck", - "Ġbl ows", - "ing o", - "ĠMar c", - "ìķĦ ìĦľ", - "M ich", - "ĠCle an", - "è ´", - "est o", - "ĠP ack", - "Ġsha ft", - "BRUN O", - "Ġa ven", - "u ur", - "Ñģк олÑĮко", - "ê´ Ģ", - "Ġautom ated", - "Ġvent ure", - "Ġsurve illance", - "ĠG row", - "ĠE mer", - "Ġд оÑĢ", - "Ġinvest or", - "ĠY ok", - "Ġl atter", - "ĠN I", - "Ġfunction ing", - "ĠHam ilton", - "Ġ5 1", - "Ġmurder ed", - "Ġanch or", - "Ġc uc", - "ĠSC P", - "ĠMad am", - "Ġconstra ints", - "Ġb arn", - "ank en", - "Ġë§İ ìĿĢ", - "ĠMot or", - "ĠDo ing", - "Ġam en", - "et ts", - "Ġinst ructor", - "eg t", - "ak o", - "Ġpost ure", - "iv ia", - "ĠPol ish", - "Ġдв а", - "Ġcolor ful", - "Ġel bow", - "Ġpar le", - "Ġpass er", - "Ġcond em", - "ort al", - "Ġfert il", - "ا د", - "ĠCol omb", - "Ġalign ment", - "Ġastron aut", - "ĠM ut", - "Ġsal mon", - "Ġstructure d", - "ŀ ר", - "Ġclick s", - "Ġm iej", - "æĶ ¿", - "ãģĦ ãĤĦ", - "ĠR ound", - "Ġrain bow", - "ĠV A", - "ãģĶ ãģĸ", - "ì§ Ī", - "ot z", - ", ", - "Ġch ords", - "ĠSand ers", - "Ġë¶ Ħë", - "B en", - "Ġdar über", - "ili ans", - "Ġorder ing", - "ĠMan h", - "Ġkil ogram", - "Ġkar ÅŁ", - "Ġgr asp", - "Ġghost s", - "al en", - "ĠJ edi", - "Ġб ли", - "Ġdownload ed", - "Ġconduct ing", - "ĠH ak", - "Ġresearch er", - "il an", - "go od", - "ĠH annah", - "ĠdÃ¼ÅŁ ün", - "ĠMess iah", - "u ity", - "ion a", - "Ġprob able", - "ĠY E", - "Ġindepend ently", - "Ġbuff er", - "b urn", - "our d", - "ĠMc K", - "Ġl ingu", - "uj emy", - "еÑĢ ÑĤ", - "Ġintuit ive", - "Ġcrack s", - "app ropri", - "nt y", - "Ġge en", - "Ġl end", - "Ġcert ification", - "ID S", - "un ter", - "pe es", - "Ġtr ump", - "Ġbank rupt", - "Ġfe as", - "è Ĺ", - "Ġdu ż", - "æ¸ ħ", - "Ġvirus es", - "Ġ5 8", - "g od", - "Ġж ел", - "Ġst alk", - "I nd", - "ach i", - "ĠC F", - "ĠC ond", - "Ġsan ct", - "Ġcont en", - "Ġfre ed", - "ĠR T", - "Ġment ors", - "ì¡ ±", - "Ġport able", - "ĠPaul o", - "r ane", - "HA HA", - "ĠS ection", - "ç Ĩ", - "hy un", - "ĠÎŃ Ïĩ", - "ĠP ub", - "ĠInd epend", - "Ġcomp ounds", - "ĠÑģ Ñĭ", - "Ġmess aging", - "Ġded ication", - "Ġnot icing", - "Ġdevot ed", - "ÑİÑĤ ÑģÑı", - "Ġsn akes", - "Ġbattle field", - "p ers", - "Ġdel a", - "9 2", - "Ġha i", - "ill ä", - "ér er", - "e very", - "Ġrespons ive", - "×Ļ ×ķ", - "op f", - "é ī", - "Ĭ ¸", - "Be cause", - "Ġtour ism", - "Ġê·¸ ê²Į", - "×ķ× ¦", - "Ġcan s", - "st üt", - "Ġdon ne", - "ĠD ios", - "ĠU ber", - "act ory", - "Ġorient ed", - "ĠH erm", - "Ġpat ron", - "ur f", - "be i", - "Ġprogram a", - "ĠOh h", - "gen er", - "Ġf ist", 
- "ĠW endy", - "Ġand a", - "Ġguess ed", - "Ġfre ak", - "ä¸Ń åľĭ", - "ĠK ings", - "ch ool", - "Ġoff line", - "ĠIndian a", - "ĠAll iance", - "Ġ5 3", - "Ġpartic ul", - "ĠF ocus", - "Ġinhab it", - "Ġê°ĻìĿĢ ëį°", - "ĠMc G", - "ows ki", - "ĠìĿ´ ê±´", - "Ġpa ÅĦst", - "он и", - "itt a", - "Ġconfirm ation", - "ĠBrook lyn", - "Ġnood le", - "f und", - "it ud", - "Ġgrand parents", - "Ġbar becue", - "ει ÏĤ", - "Ġ á", - "Ġball ot", - "ĠV eter", - "Ġpip es", - "ig ious", - "ĠG raph", - "est ed", - "Ġë¸ Įë", - "ĠK E", - "ãģ¡ãĤĩ ãģ£ãģ¨", - "Ġe ins", - "Ġhat red", - "ãģij ãģ©", - "Ġd ang", - "ee ee", - "Ġarch ae", - "ĠJes se", - "Ġdetect ed", - "Ġsen i", - "burg h", - "Ġdispl acement", - "Ġdo p", - "Ġcondition ing", - "Ġне ÑģколÑĮко", - "Ġdistur bing", - "P H", - "Ġthin ner", - "Ġwound ed", - "ĠCu ando", - "Ġcush ion", - "Ġwh ites", - "Ġprefer ences", - "Ġì¤Ģë ¹Ħ", - "Ġka ż", - "ĠG ate", - "ĠP ath", - "d les", - "à¸Ħ ร", - "im ore", - "Ġë³´ìĹ ¬", - "Ġdiscipl ines", - "á» ı", - "Ġmes ma", - "Ġìĥ Īë", - "Ġìĭ ¬", - "Ġg ing", - "Ġumbre lla", - "IGH T", - "Ġp ension", - "Ġcomb ining", - "S S", - "Ġrect angle", - "á»ĩ t", - "Ġpro xim", - "ĠC ow", - "¸ Į", - "Ġintention al", - "æķ Ļ", - "Ġdec id", - "ĠÑģк аж", - "ĠU ma", - "ias m", - "b uz", - "Ġdebr is", - "Ġc ass", - "ĠP rop", - "is ka", - "ë ł¥", - "ester ol", - "uss ian", - "ìĿ´ë ŀij", - "Ġun limited", - "Ġadm ire", - "Ġtight ly", - "Ġgen ome", - "ĠJun ior", - "ven ir", - "g us", - "Ġc Äĥ", - "ĠV lad", - "Ġí Ĥ", - "Ġrel ativ", - "in ci", - "Ġaun que", - "ĠBo ys", - "ÑĨи он", - "ĠSw iss", - "Ġphys icians", - "Ġíı ī", - "ĠP ET", - "Ġw ounds", - "ab out", - "Ãł i", - "on z", - "ur ities", - "ĠÑĥв ид", - "å· ¦", - "Ġment ality", - "Ġvari ance", - "Ġseg unda", - "Ġvol cano", - "al ie", - "ॠĩ", - "Ġt iles", - "ĠT erry", - "ĠاÙĦÙĦ Ùĩ", - "Ġcan on", - "Ġsc attered", - "pt on", - "Ġdefin itions", - "Ġal gebra", - "ot en", - "ab lo", - "ij uana", - "Ġwra pping", - "Ġses ame", - "ĠнаÑĩ ина", - "ĠAl f", - "ĠÐł оÑģÑģ", - "or no", - "Ġan kle", - "Ġspecial ty", - "Ġattempt ing", - "ili ation", - "Ġ19 20", - "Ġphen omena", - "ĠPro duct", - "ĠB uck", - "ĠA ww", - "se en", - "Ġvo id", - "ĠFrank lin", - "Ġadvoc acy", - "ĠS ep", - "Ġcool est", - "ĠÑģ ÑĢазÑĥ", - "ĠQu and", - "Ġ9 00", - "ĠTr ad", - "d ies", - "Ġhas h", - "æĪij å°±", - "ä¹Ł æĺ¯", - "Ġpot s", - "Ġsad ly", - "Ġvi able", - "ĠT iger", - "ĠON E", - "Ġneur ons", - "ow anie", - "Ä Ĺ", - "ĠSh ar", - "ĠLand es", - "Ġconfer ences", - "è© ²", - "Ġcred ential", - "Ġl ime", - "ine e", - "x it", - "p ay", - "Ġinc ons", - "Ġ>> :", - "èª į", - "Ġí ŀĺë", - "Ġless er", - "Ġsp ill", - "Ġprem ise", - "Ġ36 5", - "ĠH ost", - "Ġtom ar", - "×IJ× ľ", - "ë ²Ī", - "ĠWhat s", - "Ġlight weight", - "ĠM ap", - "f ia", - "ells chaft", - "Ġvend ors", - "uest o", - "ĠM ister", - "ĠÐŁ ÑĢи", - "åı ³", - "h ma", - "Ġintention ally", - "ĠT ang", - "éĹ ®", - "Ġident ification", - "Ġetc etera", - "ĠN ee", - "ĠÑĤ ÑĢи", - "ê· ¸", - "Ġcrypt ocur", - "Ġin hale", - "Ġadd ict", - "åIJĦ ä½į", - "Ġma u", - "ĠÑĤак аÑı", - "Ġë² Ħ", - "Ġcomp rar", - "ied zieÄĩ", - "ĠоÑĤ но", - "Ġbegin ner", - "Ġм Ñĥж", - "Ġobs c", - "Ġlim iting", - "asc ular", - "Ġins pection", - "ac i", - "Ġre jo", - "M us", - "Ġz aten", - "Ġsz cz", - "ĠMad rid", - "Ġvar ieties", - "Ġest Ãł", - "ĠSh akes", - "Ġk its", - "Ġad minister", - "Ġla va", - "Ġg Ã¥", - "è© ¦", - "ת ×Ļ", - "ĠWay ne", - "Ġinst agram", - "Ġr ated", - "p aper", - "Ġb ild", - "Ġpret ending", - "Ġobser ving", - "ĠÑģам ом", - "Ġtr or", - "Ġorgan isms", - "Ġfal ta", - "Ġh ometown", - "ç ±", - "Ġí ĭ", - "Ġche g", - "Ġì ¡", - "Ġcomm 
a", - "is é", - "Ġlike lihood", - "av ored", - "Ġgel di", - "ни ков", - "Ġmed io", - "Ġjak ie", - "ĠJ up", - "Ġgreen house", - "Ġsp it", - "ко е", - "Ġк аж", - "ĠG ram", - "ĠCon ference", - "Ġdef icit", - "s ın", - "in se", - "u ÄŁ", - "Ġr icht", - "Ġcoinc idence", - "åı į", - "Ġeu rop", - "Ġbutter fly", - "p read", - "Ġìĸ ¼", - "èĢ ¶", - "Ġwa vel", - "ĠIn fin", - "ĠPlan et", - "Ġself ie", - "ient ras", - "Ġar rog", - "os er", - "id al", - "ł×Š׳×ķ", - "üt ün", - "Ġfresh man", - "ĠMach ine", - "Ïĥ ÏĦ", - "ĠD ia", - "ìĿ´ ëĭ¤", - "ãģĵ ãģĨ", - "ne a", - "Ġlist ing", - "Ġconfig ure", - "ut or", - "U p", - "ts chaft", - "ri ère", - "Ġup wards", - "ĠÑħоÑĩ Ñĥ", - "Ġswe ep", - "B r", - "Ġexpress ing", - "Ġun happy", - "Ġmand atory", - "g ender", - "ĠA ÃŃ", - "Ġindic ators", - "Ġoil s", - "n ote", - "Ġseg ur", - "ож еÑĤ", - "yn asty", - "Ġdist ances", - "Ġmer ge", - "BER T", - "Ġsur render", - "Ġbu at", - "ĠA wards", - "Ġseñ or", - "od ox", - "Ġfl avour", - "Ġab dom", - "Ġconfig ur", - "8 6", - "ĠDI Y", - "Ġrig id", - "° ĺ", - "Ġcorpor ation", - "Ġg room", - "j aw", - "ĠNe ar", - "ил о", - "Ġoper a", - "ĠIn nov", - "и ÑĢа", - "ĵ ±", - "Ġspec ified", - "Ġcos m", - "ĠFre edom", - "Ġcl own", - "ĠN em", - "Ġв ол", - "Ñij н", - "Ġchar ger", - "à¹ģ ล", - "Ġinflu ential", - "äs ident", - "é ¤", - "ĠìĦ łë", - "Ġvol umes", - "æ IJ", - "Ġout ras", - "ĠTw itch", - "Ġfound ing", - "Ġa while", - "Ġco il", - "ê° Ļ", - "Ġc ả", - "ĠTh row", - "ĠH ence", - "omm t", - "ĠBen jamin", - "глÑı д", - "T ime", - "ob ic", - "Ġm our", - "Ġd read", - "ĠL Ãł", - "ĠCh ile", - "Ġpre val", - "Ġv ain", - "Ġart ık", - "Ġpres erved", - "ĠоÑĤ д", - "Ġware house", - "Ġbest e", - "ĠSever al", - "ĠS ituation", - "Ġcard board", - "T od", - "er na", - "Ġgar ant", - "Ġgest ure", - "Ġh en", - "Ġspe lling", - "ose xual", - "Ġan ne", - "Ġm ice", - "ĠMe ine", - "c ard", - "Ġre bell", - "Ġcert o", - "Ġìľ łë", - "Ġvers chied", - "ĠB os", - "Ġinv ention", - "Ġtr ze", - "Ġman ière", - "ĠCh ad", - "Ġsp re", - "Ġorganis ations", - "Ġpoor ly", - "Ġan terior", - "Ġst air", - "к ÑĢ", - "Ġatom ic", - "Ġsymp ath", - "Ġcontin ually", - "Ġkle ine", - "è te", - "и Ñī", - "ο ÏĤ", - "pe ut", - "Ġrep osit", - "Ġent ra", - "E m", - "Ġfinan cing", - "Ġмн ог", - "Ġthe sis", - "ĠCom puter", - "e au", - "ĠT ree", - "Ġbr ide", - "ons ieur", - "sh ire", - "w ic", - "D E", - "ĠìĪ ĺë", - "Ġac om", - "ĠP O", - "ers ch", - "Ġпом оÑī", - "ĠAr men", - "Ġì£ ½", - "Ġz or", - "Ġprint s", - "ĠD ass", - "æ¸ ¯", - "Ġdur able", - "ĠTrans port", - "ìŀIJ ê°Ģ", - "Ġл ег", - "Ġdé t", - "ô le", - "am ous", - "Y N", - "Ġcl iff", - "Ġgramm ar", - "ĠÐŁÐ¾ ÑįÑĤомÑĥ", - "ĠlÃł m", - "es ch", - "Ġmiser able", - "Ġvol ts", - "ĠC ad", - "uk an", - "ÑĤ ив", - "r ust", - "Ġìĺ¬ë Ŀ¼", - "Ġver k", - "Ġchick ens", - "ĠY oo", - "Ġout fits", - "c ode", - "Ġhier archy", - "net es", - "Ġcounter part", - "Ġt ôi", - "Ġt ed", - "ĠB art", - "Ġë Ŀ¼", - "ĠGen au", - "Ġinc oming", - "ĠA BC", - "ri que", - "ĠоÑĤ п", - "qu al", - "Ġincent ive", - "Ġih ren", - "׳ ×Ļ", - "lo e", - "Ġ19 30", - "Ġbar g", - "Ġd iction", - "Ġön ce", - "IN S", - "Ġre h", - "isia j", - "m outh", - "Ġsc oring", - "l ık", - "ĠìķĦ 주", - "OR IA", - "ĠEst ados", - "Ġcompan ion", - "Ġasse mble", - "Ġpun ished", - "Ġit al", - "Ġprev ents", - "ist es", - "ĠKent ucky", - "Ġloc ate", - "Ġfast ing", - "ãģ¨ æĢĿ", - "ĥ Ģ", - "ĠSe b", - "ĠCr own", - "op ia", - "Ġwh ip", - "us z", - "к ами", - "Ġdatab ases", - "åŃ Ĺ", - "Ġprose c", - "Ġ199 7", - "ĠìĤ´ì §Ŀ", - "ĠSol ar", - "ĠP ues", - "ĠZ en", - "oll o", - "ĠG uru", - "Ġsque ez", - "ĠÐĹ Ð°", - "ĠÄ į", - 
"cept ions", - "c ca", - "iz able", - "m and", - "Ġbreak through", - "Ġtables poon", - "ĠS EC", - "ik h", - "ĠS ão", - "Ġп ло", - "am en", - "Ġpr ac", - "Ġdar ling", - "Ġtall er", - "Ġrend ering", - "Ġìļ°ë¦¬ ê°Ģ", - "ĠÏĦη ÏĤ", - "Ġm ã", - "Ġes os", - "uer do", - "ĠÑģ ÑĩиÑĤ", - "all er", - "ìĹĪ ìĸ´ìļĶ", - "Ġmill ones", - "ler in", - "Ġpe gar", - "on ne", - "Ġenroll ment", - "Ġli egt", - "Ġbo a", - "w iÄĻ", - "bs p", - "Ġcy cling", - "ĠBern ie", - "Ġ198 9", - "Ġд алÑĮ", - "ĠDak ota", - "ĠÑģв Ñıз", - "ĠC P", - "Ġst are", - "íĤ ¤", - "Ġprosper ity", - "Ġarrange ments", - "Ġarri ving", - "m ä", - "Ġkay ak", - "ip t", - "Ġp ardon", - "Ġrel at", - "Ġver ste", - "ĠF ig", - "Ġfo il", - "ĠTalk ing", - "pe are", - "Ġno i", - "ĠпÑĢи ÑĪ", - "Ġhoc key", - "Ġad o", - "ĠO UT", - "6 7", - "Ġhorm ones", - "ĠAven ue", - "ĠSuper man", - "Ġpres cription", - "uber netes", - "C L", - "ot ive", - "N IS", - "ien en", - "Ġsad ness", - "ĠV it", - "T y", - "Ġstar ter", - "Ġbed e", - "Ġfound ations", - "Ġso re", - "åº Ĺ", - "Ñīе ÑģÑĤв", - "ìļ °ë", - "ĠÑĩ Ñĥв", - "l ink", - "Ġmane u", - "work ing", - "Ãł n", - "ĠAtt ack", - "ĠC art", - "ve is", - "ĠRes p", - "ens ing", - "Ġì¢ĭ ìķĦìļĶ", - "Ġesc uch", - "ĠR NA", - "Ĥ ´", - "Ġad op", - "Ġb ending", - "ع د", - "Ġman ages", - "us p", - "Ġt art", - "Ġrout er", - "B o", - "Ġestab lishing", - "Ġbal ancing", - "Ġathlet ic", - "ĠS lo", - "Ġf ills", - "Ġн аб", - "Ġд ал", - "Ġpos so", - "ĠV ielen", - "Ġcrit ics", - "Ġlaws uit", - "ĠIsa ac", - "ĠÑĦилÑĮ м", - "Ġtr as", - "Ġpra w", - "ĠCra zy", - "Ġne u", - "Ġk ull", - "Ġtum or", - "ĠAP P", - "g ate", - "ĠA RE", - "9 8", - "ĠSte am", - "Ġfuck ed", - "l age", - "ĠâĻ ¬", - "ĠM D", - "f y", - "Ġshell s", - "ĠSe ems", - "iz ers", - "Ġr anges", - "ĠAnton io", - "AT ION", - "ĠB aba", - "Ġìĥ ī", - "k un", - "Ġpray ed", - "ÑĢ Ñı", - "ĠпÑĢоÑĤ ив", - "Ġse as", - "b ury", - "Ġ×Ķ× ©", - "Ġtra it", - "ĠDep ending", - "Ġd re", - "Ġkön nt", - "ÑĨ Ñĥ", - "Ġlip stick", - "ee z", - "ĠпÑĢ имеÑĢ", - "Ġassign ments", - "B ob", - "Ġmet als", - "Ġspe cially", - "å°į ä¸įå°į", - "Ġìĺ Īë", - "ĠÅ ¡", - "Ġv ista", - "ĠÎ ¬", - "Ġtw ins", - "Ġnot able", - "ĠS au", - "Ġdé velop", - "Ġç ek", - "Ġpoly nom", - "av am", - "Ġtamb é", - "он ом", - "Ġpl asma", - "Ġe fect", - "Ġlä ng", - "Ġcas i", - "Ñģ а", - "ım ı", - "ãģĻ ãĤĭ", - "ĵ¤ ìĿĢ", - "Ġlab our", - "oss en", - "ĠP un", - "r if", - "Ġd oses", - "Ġoper ates", - "ил ли", - "Ġja ar", - "st aw", - "ĠìĤ¬ëŀ ij", - "Ġat m", - "Ġprotect s", - "Ġimp ed", - "H O", - "Ġc ima", - "Ġto ch", - "ab is", - "Ġsend o", - "la us", - "Ġcur l", - "ĠN um", - "Ġspons ors", - "Ġdé but", - "ĠAlex a", - "ĠB ür", - "ĠA mer", - "Ġc ope", - "Ġиз в", - "j al", - "Ġ199 5", - "ap at", - "res se", - "ĠPri ze", - "ĠCla ire", - "ĠBrand on", - "Ġwszyst ko", - "Ġval ued", - "à¸Ļ ะ", - "Ġse ct", - "Ġsecret ly", - "Ġdiam onds", - "ĠEv an", - "ĠRP G", - "ãģ« ãģª", - "Īë ıĦ", - "ĠUnivers al", - "Ġdoub ts", - "ĠP in", - "wiÄħ z", - "ļ ©", - "Ġal bo", - "Ġbra ucht", - "AU L", - "ĠM obile", - "gr ades", - "Ġsch em", - "wh y", - "ĠN icht", - "p i", - "g le", - "Ġchor us", - "Ġg ly", - "Ġrein force", - "Ġm uff", - "ĠSh en", - "ĠH ola", - "Ñĥ г", - "vid emment", - "v ial", - "ac ious", - "laim ed", - "ĠR ico", - "Ġve gg", - "Ġillust ration", - "ĠBut ter", - "ow ad", - "Ġeu x", - "Ġenf ants", - "ĠLe ader", - "ĠVill age", - "et ically", - "ÙĨ ÙĬ", - "Ġst ew", - "Ġsurpr ises", - "Ġc ue", - "ĠGrand ma", - "ĠC elsius", - "ĠR icht", - "en c", - "Ġpet ition", - "Ġher b", - "Ġw icked", - "Ġsch le", - "oc aly", - "Ġtrans f", - "Ġtok ens", - "ĠGr ay", - "ĠB BC", - "I 
K", - "Ġ15 00", - "z n", - "ĠNe v", - "Ġk oy", - "Ġz ar", - "Ġbull shit", - "ĠColomb ia", - "ul ative", - "Ġwides pread", - "y ect", - "k it", - "Ġempres a", - "Ġn our", - "Ġburn s", - "at in", - "a ired", - "Ġrevolution ary", - "Ġгод Ñĥ", - "ĠLog an", - "Ġ199 6", - "ĠGra ham", - "re b", - "ĠN HS", - "æľ Ľ", - "Ġcost umes", - "Ġnaw et", - "Ġlo vers", - "ĠLuc y", - "ĠInd igenous", - "íķĺ 기", - "Ġimmun ity", - "¥ ´ë", - "uit o", - "Ġexcess ive", - "Ġdon ations", - "Ġ×Ķ ר", - "Ġì² «", - "éī Ħ", - "Ġdry ing", - "mel on", - "Ġsurve ys", - "Ġ무ì Ĭ¨", - "é¢ ¨", - "aa a", - "Ġpro be", - "an cial", - "Ġlou der", - "Ġhot els", - "ü ÄŁ", - "ag ner", - "Ġorig ins", - "Ġë§Ī ì§Ģë§ī", - "Ġ* *", - "Ġstr angers", - "ĠHa us", - "com ed", - "Ġan throp", - "Ġus o", - "ĠìķĦ ì§ģ", - "ĠY uan", - "ĠíķĦ ìļĶ", - "pl er", - "ress ive", - "Ġsp raw", - "ĠSt ew", - "Ġ199 4", - "Ġeld ers", - "Ġme inen", - "Ġj unt", - "Ġac oust", - "ĠW ohn", - "Ġban anas", - "Ġproject ion", - "ĠSt ick", - "leg t", - "spe ed", - "ĠcÅ ©ng", - "ĠW ort", - "ĠBalt imore", - "ĠÑĨ ел", - "Ġdun no", - "å¼ ·", - "? ,", - "ãĥī ãĥ³", - "ĠLoc al", - "ost o", - "Ð Ń", - "од а", - "ĠPort uguese", - "Ġtheir s", - "Ġdé m", - "åı ¦", - "Ġdra uf", - "ĠBuddh ist", - "ert a", - "G e", - "Ġcar rot", - "ĠWonder ful", - "Ġso ak", - "Ġchair man", - "gg i", - "IC A", - "f ried", - "Ġfl ick", - "ĠThrough out", - "Ġìļ °ë", - "Ġc ough", - "Ġfl uffy", - "sch ool", - "Ġr ipped", - "---- ----", - "ĠZuk unft", - "Ġн еб", - "Ġst o", - "ĠB O", - "p ent", - "ĠLaw rence", - "Ïī ÏĤ", - "st icks", - "ĠE ins", - "ĠÑĢ Ñĭ", - "ĠStr ong", - "Ġcar amel", - "Ġsp ite", - "az ar", - "éĥ½ æĺ¯", - "Ġcrit ically", - "Ġob ra", - "ow itz", - "ĠZ one", - "ĠÑĢ ек", - "Ġsu g", - "ard ed", - "Ġg ì", - "ff entlich", - "an che", - "Ø Ł", - "ast ically", - "ìĿ ¼ë", - "л ав", - "Ġsimpl est", - "ĠF riend", - "Ġque llo", - "Ġamb ition", - "Ġabb iamo", - "åº ķ", - "ĠÑĦ оÑĢм", - "ĠEs sa", - "Ġeduc ators", - "Ġstatist ical", - "éĢĻ éĤĬ", - "Ġchang er", - "Ġat au", - "éta is", - "ĠShakes peare", - "ë IJĺ", - "Ġtr iggers", - "Ġreal iz", - "Ġcel ui", - "whe el", - "Ġloyal ty", - "Ġscream s", - "ke hr", - "ĠM ega", - "e ast", - "Ġtop s", - "ĠTot ally", - "ount ain", - "l ord", - "Ġviol ation", - "ĠG A", - "Ġnic er", - "ĠF resh", - "ĠMel issa", - "fun ction", - "Ġra pe", - "Ġexcept ions", - "Ġsil icon", - "Ġliber ty", - "Ġhousehold s", - "ãģį ãģ¾ãģĻ", - "ĠC A", - "ĠÐŀ б", - "Ġli b", - "ŀ Į", - "c ific", - "Ġtrop ical", - "Ġinvestig ating", - "H D", - "Ġad apter", - "ĠP itt", - "an cia", - "ĠShe ll", - "friend ly", - "Ġconclus ions", - "Ġtur tle", - "Ġdec omp", - "Ġanim ations", - "ĠÑģ ек", - "ins i", - "Ġret ention", - "k ie", - "Ġinject ion", - "ĠMad ison", - "ì° °", - "Ġv ient", - "Ġvar ied", - "Ġviol in", - "ĠB il", - "Ġluck ily", - "Ġh tt", - "l ä", - "Ġr anch", - "çľĭ çľĭ", - "Ġsó lo", - "ìķ ħ", - "ĠD erek", - "ĠScript ure", - "оÑĢ а", - "Ġclassroom s", - "av il", - "form ed", - "Ġbefore hand", - "ĠG em", - "pre ch", - "Ġl in", - "Ġgre ens", - "ÑĨ ев", - "ĠMer cedes", - "Ġdr ought", - "gas ps", - "Ġab ortion", - "Ġter ribly", - "Ġspos ób", - "Ġsec ured", - "Ġat rás", - "Ġwavel ength", - "Ġgra ins", - "ect ive", - "Ġspace craft", - "Ġtour s", - "Ġprof es", - "Ġsur geon", - "ĠP ie", - "Ġide ally", - "arn er", - "U P", - "op ard", - "s ce", - "Ġimm ense", - "ĠOr t", - "roll er", - "ĠD allas", - "ĠNich olas", - "Ġs ulf", - "ĠToy ota", - "Ġquant ities", - "ce ans", - "Ġcu i", - "an ça", - "ĠC AN", - "itzer land", - "åĦ ¿", - "Ġz ou", - "ĠCy ber", - "le gen", - "ĠIn it", - "ed u", - "Ġa pert", - "Ġad jac", 
- "ou v", - "èĢĮ ä¸Ķ", - "r s", - "Ġcab bage", - "Ġwheel chair", - "iny l", - "ĠD ynam", - "ĠìķĦëĭĪë Ŀ¼", - "Ġl ing", - "h l", - "Ġмог Ñĥ", - "Ġcris p", - "Ġm ij", - "Ġd ug", - "n in", - "Ġbl oss", - "Ġbelong ing", - "Ġloud ly", - "Ġminer als", - "Ġconclud ed", - "Ġsearch ed", - "9 6", - "ĠMe et", - "ĠS EO", - "ĠС к", - "ĠH ob", - "ot ta", - "Ġpropag anda", - "Ġcin namon", - "Ġhun ter", - "Ġgeme ins", - "Ġsculpt ure", - "uls ion", - "Ġv äl", - "Ġmagaz ines", - "Ġcontrovers y", - "ä¸Ģ 樣", - "Ġsequ ences", - "ãģĦ ãĤĭ", - "Ġíļ Į", - "Ġdel eted", - "ä½ ¿", - "IJë ıĦ", - "Ġvary ing", - "ãĥ Ĩ", - "Ġmount ing", - "Ġaff air", - "Ġpath ways", - "æ ¦", - "Ġdig o", - "äº ®", - "Ġд ок", - "A lex", - "Ġtob acco", - "ĠC V", - "Ġbother ed", - "Ġamb ient", - "ink y", - "ĠS L", - "Ġh ates", - "Ġje żeli", - "Ġcon greg", - "Ġel as", - "Ġde uts", - "ĠStud ios", - "ch ÄĻ", - "Ġdocument ed", - "ĠCru z", - "ĠL en", - "ĠDoug las", - "ĠPort ugal", - "ent i", - "Ġsp ouse", - "Ġanal ys", - "av ia", - "Ġed ited", - "Ġl ại", - "bu ilt", - "Ġv ille", - "ad ora", - "Ġbrac elet", - "Ġs ushi", - "Ġp m", - "Ġtra ils", - "Ġl ug", - "Ġö ver", - "Ġs orrow", - "Ġcol ony", - "ado x", - "Ġser ie", - "any ak", - "ĠØ ·", - "ĠG ulf", - "æĺ¯ ä¸įæĺ¯", - "ĠP V", - "ĠSam uel", - "ĠK it", - "ĠR al", - "ont in", - "ex pl", - "Ġent ries", - "Ġactiv ists", - "P s", - "Ġs ant", - "ĠÑĤо Ñĩ", - "ĠBr uno", - "ke ley", - "Ġtut to", - "é Ķ", - "Ġv intage", - "Ġterr ified", - "Ġпо Ñħ", - "us ive", - "ow ers", - "ай ÑĤ", - "ë ıĻ", - "Ġtwist ed", - "ĠTh ought", - "Ġt ah", - "Ġshr ink", - "Ġshe er", - "l it", - "Ġdal am", - "Ġd ib", - "Ġv ard", - "ow ane", - "Ġdo br", - "ĠR ena", - "ĠÑģво Ñİ", - "ĠpaÃŃs es", - "ĠE ra", - "ãģ® ãģ§", - "ĠB UT", - "s ighs", - "Ġê·¸ ê±°", - "Ġgro ÃŁen", - "Ġë¹ ¨ë¦¬", - "Ġn erves", - "Ġconst it", - "Ġpreoc up", - "ĠG ay", - "ĠX u", - "keep er", - "he ure", - ".. 
)", - "ĠCal m", - "ĠUn idos", - "ĠìĿ´ ê²ĥ", - "ĠAqu i", - "Ġìłľ ìĿ¼", - "d ır", - "ì¦ ĺ", - "y our", - "ĠÑįÑĤ им", - "20 20", - "Ġr und", - "ĠH O", - "ĠC atherine", - "iel i", - "Ġf usion", - "Ġide ology", - "Ġfor am", - "sh aped", - "ĠíĽ Ħë", - "Ġw t", - "Ġret r", - "Ġpr éc", - "Ġê° ij", - "Ġopen ly", - "v ity", - "구 ìļĶ", - "Ġobst acle", - "Ġbo o", - "Ġse iner", - "ic orn", - "Ġeigen lijk", - "Ġhead er", - "are mos", - "Ġso fter", - "ĠÐŁ од", - "Ġpre jud", - "Ġdefin es", - "ier te", - "Ġbl ending", - "Ġbelie vers", - "ĠWo chen", - "Ġник ак", - "ĠÐļ огда", - "ĠTyp ically", - "Ġíģ ¬", - "ç® ¡", - "ci os", - "Ġmiss iles", - "Ġsp onge", - "ĠK itchen", - "Ġt ren", - "ning en", - "Ġsc rap", - "Ġser ait", - "´ì ł", - "ç ¹", - "Ġë° ĺë", - "Ġrest ored", - "Ġprzy kÅĤad", - "ĠK ubernetes", - "Ġsa it", - "Ġu w", - "Ġen abling", - "Ġtra vers", - "amp s", - "åı Ĺ", - "ĠOM G", - "ens or", - "Ġz osta", - "Ġpronoun ced", - "A ng", - "norm al", - "Ġeconom ies", - "t in", - "ĠChamp ion", - "iz en", - "Ġar beiten", - "ĠG ospel", - "ĠZ u", - "ng a", - "Ġliter acy", - "ĠM ans", - "Ġcircul ation", - "Ġad ap", - "ĠTot al", - "Ġmere ka", - "Ġol acak", - "ÑģÑĤ аÑĤи", - "J ack", - "Ġm und", - "Ġth ief", - "b ies", - "Ġê² ģ", - "a que", - "ĠÚ© ÛĮ", - "ĠSc ar", - "å ²", - "Ġab ol", - "Ġdev ote", - "Ġ0 1", - "Ġs itten", - "ĠVis ual", - "we ek", - "s ome", - "ing t", - "Ġjournal ism", - "ĠH ir", - "ĠB achelor", - "in ery", - "Ãľ ND", - "ãĥ Ł", - "ç» Ļ", - "Ġcolor ing", - "ĠCr ist", - "Ġcelebr ities", - "ĠÑĩ иÑģ", - "ĠC rit", - "Ġdifferent iate", - "ĠÐľ не", - "el im", - "Ġse afood", - "Ġalgum as", - "otherap y", - "æĪ °", - "Ġgla ub", - "Ġarbitr ary", - "g ens", - "ĠбÑĥд ем", - "Ġt av", - "Ġcream y", - "ĠCount ry", - "a ñ", - "м еÑĤ", - "Ġh inter", - "Ġm ism", - "Ġillust rate", - "ÃľND NIS", - "Ġdecre asing", - "Ġwen iger", - "AK I", - "ix on", - "Ġн ей", - "Ġfat to", - "Ġn erd", - "ç ł", - "Ġb itte", - "P er", - "Ġt ane", - "Ġgö z", - "Ġfor te", - "ĠE y", - "Ġнав еÑĢ", - "è¢ «", - "ĠWord Press", - "ĠM is", - "Å ¯", - "z äh", - "Ġinté ress", - "osa urs", - "ĠFall s", - "Ġn essa", - "9 7", - "Ġmuseum s", - "Ġcorrespond s", - "Ġs ings", - "f our", - "Ġed er", - "ĠCommun ist", - "o a", - "ne k", - "ĠWH O", - "Ġcor po", - "Ġmess ing", - "ÏĦ αι", - "Ġbrush es", - "Ġb isc", - "ĠAr beits", - "ĠT ax", - "Ġse le", - "Ġflag s", - "ou pe", - "Ġanticip ated", - "ãĥ ij", - "ĠN ad", - "Ġpou red", - "Ġm l", - "Ġll ama", - "Ġvisual ize", - "Ġlisten ers", - "ÙĦ Ùĥ", - "al ten", - "Mich ael", - "Ġcos ì", - "Õ¡ Õ", - "op us", - "Ġíķ´ì £¼", - "Ġh ike", - "ĠAtt orney", - "ĠHill ary", - "ud ed", - "Ġíķĺ ì§Ģë§Į", - "Ġdo ve", - "Ġstorm s", - "ак Ñģ", - "Ġdoct rine", - "Ġhe x", - "ik s", - "no ÅĽÄĩ", - "Ġscript s", - "Ġδ εν", - "ĠÑįÑĤи Ñħ", - "ĠÐ Ĩ", - "ab er", - "ĠV as", - "Ġcent imeters", - "×ŀ ×Ķ", - "ни б", - "Ġrid ers", - "ĠT rib", - "åĮ ħ", - "Ġtak że", - "Ġn oun", - "Ġic ons", - "Ġsole ly", - "mind ed", - "Ġdisp on", - "ĠSw itzerland", - "Ġcl usters", - "Ġqu eda", - "ail ing", - "Ġman ga", - "Ġ6 8", - "Ħ Ī", - "Ġt et", - "g ins", - "ha us", - "ç© º", - "å· ¥", - "ĠO P", - "ot ed", - "Ġnouve au", - "AL LY", - "ÙĪ د", - "ò n", - "Ġmort ality", - "ĠGit Hub", - "d rop", - "Ġdis gu", - "Ġrec om", - "Ġloc als", - "Ġhome made", - "amb a", - "Ġpron unciation", - "Ġal phabet", - "ан ÑĮ", - "ow any", - "ir as", - "id ency", - "OM E", - "ĠÑĢаÑģ Ñģ", - "ar ak", - "v iamente", - "Ġnon profit", - "ĠYouT uber", - "Ġp arenth", - "ĠB oo", - "v at", - "ĠSt ir", - "Ġpre cip", - "Ġan ts", - "Ġall y", - "ĠMa ori", - "ĠëĮĢ íķľ", - "åı¯ æĺ¯", - "og ene", - 
"ĠLab our", - "aret te", - "Ġrecy cling", - "ens a", - "Ġpurs uit", - "Ġs ak", - "ĠÐĹд еÑģÑĮ", - "Ġtoler ance", - "Ġsa at", - "Ġclick ed", - "âĻ ¥", - "Ġface book", - "ĠInt o", - "Ġincent ives", - "기 ëĬĶ", - "ĠD ennis", - "ĠW ik", - "ges ch", - "à¹ĢภĽ", - "ĠÏĢ α", - "ĠWh oo", - "Ġround ed", - "Ġdo pe", - "Ġcapt uring", - "ĠWar ri", - "Ġcivil ian", - "Ġchar ming", - "Ġes as", - "Ġsust ained", - "Ġle aning", - "Ġabund ance", - "ÃŃ lia", - "алÑĮ нÑĭй", - "Ġph ải", - "ac ja", - "Ġê°Ļ ìķĦ", - "act iv", - "า ย", - "Ġ9 7", - "Ġм ой", - "c ro", - "ĠJack ie", - "itt ees", - "br acht", - "ul ent", - "Ġìł ľë", - "Ġplug in", - "v antage", - "part y", - "Ġsu as", - "Ġan te", - "Ñĥ л", - "ÐĿ ÐIJ", - "æĤ ¨", - "ĠÏĥ Ïħ", - "Ġmet h", - "Ġenthus iasm", - "ÑıÑĤ ÑģÑı", - "íĻ Ķë", - "Ġsynth etic", - "Ġseason ing", - "ĠL ost", - "on omy", - "ĠSp ark", - "Ġb ure", - "Ġass ured", - "Ġimag in", - "Ġcar ro", - "S ha", - "Äħ t", - "нÑĥ ÑĤÑĮ", - "át ica", - "T Y", - "Ġk ern", - "ĠBrazil ian", - "à °", - "Ġsusp ended", - "ĠCar ib", - "Ġbiz im", - "ĠOl iver", - "ãģ ¶", - "T om", - "Ġпл ан", - "Ġn ope", - "omet hing", - "Ġbe iden", - "ÑĨ ен", - "Ġflu ct", - "Ġμ οÏħ", - "Ġf athers", - "ĠBl ake", - "Ġup ward", - "ĠD ash", - "ĠL il", - "ĠìĪ ĺëıĦ", - "Ġrevel ation", - "Ġelev ated", - "ĠJi ang", - "LE D", - "ĠThom pson", - "Ġмог ÑĥÑĤ", - "ÑģÑĤ ÑĢÑĥ", - "if iers", - "Ġcome back", - "Ġbuy ers", - "ê² °", - "ĠS ales", - "иÑĩ е", - "c iones", - "Ġwh istle", - "Ġd ull", - "LE X", - "Ġíķĺ ê²łìĬµëĭĪëĭ¤", - "Ġcrimin als", - "Ġdes cent", - "ipp le", - "mas ı", - "Ġfool ish", - "ĠдÑĥм аÑİ", - "t ar", - "Ġman go", - "Ġchore ography", - "M att", - "Ġterr itor", - "Ġac aba", - "ĠEin stein", - "ĠI BM", - "ĠMet al", - "ĠCry stal", - "Ġr ah", - "Ġf oul", - "ĠIsland s", - "Ġint act", - "ĠR ail", - ". :", - "Ġac á", - "ĠпÑĢ оп", - "еÑĢ е", - "ĠWr ite", - "he he", - "ĠF O", - "ĠÏĥ ÏĦη", - "Ġdo in", - "h eld", - "Ġappropri ately", - "Ġdeliber ately", - "Ġarch ive", - "Ġgive away", - "ãģĵ ãģĵ", - "Ġfin ale", - "л аÑģ", - "ен о", - "Æ¡ n", - "æ£ Ĵ", - "og o", - "çī ©", - "ĠAud ience", - "ãħ ł", - "Ġsub ur", - "Ġhead ache", - "ан нÑı", - "ĠW itch", - "ĠSwed ish", - "ĠB I", - "Ġer ase", - "Ġk hi", - "Ġcomment ary", - "ĠS ultan", - "íĥ Ŀ", - "ĠLe ban", - "Ġë³´ì ĭ", - "ĠP am", - "pe kt", - "mon th", - "Ġground ed", - "ê ¾", - "ĠÅŁek ilde", - "2 50", - "ĠS CH", - "ios o", - "Ġin aug", - "he imer", - "Ġreflect ing", - "ĠR uth", - "ĠO il", - "Ġtrou ver", - "u ep", - ".. 
]", - "Ġìŀ Īë", - "Ġol ha", - "Ġreason ably", - "Ġgl itch", - "U B", - "ĠGr an", - "Ġad alah", - "Ġl ent", - "ر ا", - "Ġtr action", - "Ġadjust ing", - "´ ¤", - "ниб ÑĥдÑĮ", - "Ġд оп", - "Ġstretch ed", - "Ġor t", - "Ġcos ine", - "vi ol", - "Ġì ħ", - "c ir", - "Ġbast ard", - "ä¸ ĩ", - "ĠÑħ од", - "Ġqu ier", - "Ġpress ures", - "ĠAn h", - "å¹ ¾", - "Ġell es", - "Ġд ÑĢÑĥз", - "ĠможеÑĤ е", - "Ġch á»", - "ĠM é", - "ö k", - "ầ u", - "ìł Ī", - "z in", - "Ġca ution", - "ib an", - "Ġjud ging", - "ÑĥÑİ ÑĤ", - "Ġb aj", - "ĠС ейÑĩаÑģ", - "ĠPo or", - "ĠNaz i", - "Ġup beat", - "y ang", - "Ġweek ends", - "ĠEss entially", - "Ġol uyor", - "Ġspat ial", - "ack er", - "Ġsell er", - "Ġ×IJ ×ķת", - "ij ׾", - "Ġv ivid", - "ĠB ond", - "ê ¶Į", - "is kt", - "ãĤ µ", - "Ġgo at", - "dri ver", - "Ġm ug", - "ict ional", - "Ġall t", - "ĠIn iti", - "ĠR and", - "Ġfinish es", - "Ġê° Ī", - "Ġvit am", - "Ġteen agers", - "ĠMor ris", - "ì¤ Ħ", - "ĠO ri", - "i ya", - "Ġmy ös", - "St ep", - "ĠK re", - "è¾ ¦", - "Ġdin osaur", - "Ġëª ĩ", - "aff e", - "ĠëIJ ©ëĭĪëĭ¤", - "Ġz eg", - "åĪ ĩ", - "ĠManh attan", - "Ġsu jet", - "ue lle", - "st off", - "Ġd ür", - "Ġsub mar", - "es es", - "Ġa quele", - "Ġn ou", - "ĠFa ith", - "t z", - "ĠÑĤ омÑĥ", - "ace ut", - "li ers", - "Ġband width", - "Æ°á» Ŀ", - "Ġrespect ive", - "ĠA ve", - "Ġspread she", - "ĠS ent", - "ic amente", - "Ġinf ra", - "Ġlearn ers", - "Ġà® ī", - "ai ah", - "ren al", - "Ġmust ard", - "Ġhab t", - "ç ĥ", - "ĠQu é", - "Ġanaly zing", - "æ¯ ı", - "Ġso lic", - "Ġ×Ķ ×ķ×IJ", - "Ġcaus a", - "Ġwel comed", - "ĠS uccess", - "Ġfac ile", - "ĠÐŁÐ¾ÑĤ омÑĥ", - "sche in", - "Ġf etch", - "Ġstr at", - "ĠÑģÑĤо иÑĤ", - "ìĹIJìĦľ ëĬĶ", - "ĠÑģп оÑģоб", - "m am", - "Ġser ÃŃa", - "nam ents", - "wr iter", - "Ġconsult ing", - "íĺ Ģ", - "ĠBer keley", - "e u", - "as ive", - "U U", - "ĠAnal yt", - "Ġsubm ission", - "Ġmagnific ent", - "en za", - "Ġe con", - "Ġprof iles", - "Ġinc ar", - "A b", - "ĠN un", - "Ġh ic", - "scream ing", - "Ġresil ient", - "åĪ ©", - "gr und", - "Ġconc ur", - "Ġbere its", - "L D", - "Ġnur t", - "ì ī", - "Ġfe ast", - "Ġenc uent", - "ĠMich el", - "Ġsup rem", - "\" ]", - "Ġfeed s", - "ĠKoll egen", - "iss er", - "ĠF eng", - "ĠW en", - "m un", - "Ġten ÃŃa", - "ĠW rest", - "Ġìĺ¤ëĬĺ ìĿĢ", - "Ġst ead", - "Ġrest oration", - "Ġdon ated", - "Ġdel s", - "Ġc ensus", - "Ġdesper ately", - "worth y", - "H E", - "ĠSp a", - "ĠBry an", - "Ġh j", - "ĠR aw", - "ìķĦ ë", - "ĠCam era", - "Ġz ien", - "Ġst yl", - "ĠT W", - "ĠChe ese", - "bor ne", - "Ġob l", - "ĠAl ready", - "Ġunst able", - "Ġfl ames", - "p ost", - "H a", - "rom agn", - "ĠìĹ Ħë§Ī", - "d est", - "Ġkole j", - "Ġtempor arily", - "Ġdeterm ining", - "ĠGl ass", - "ÑĢ он", - "ol an", - "Ġdom inated", - "åĮ ĸ", - "__ __", - "ĠÙĩ ذا", - "ĠD ana", - "Ġdin heiro", - "a qu", - "ë ¯¼", - "ĠÃł s", - "ĠJo ey", - "ĠGr iff", - "Ġatt ain", - "Ġtrans itions", - "ĠLiter ally", - "ен д", - "ĠHa ven", - "Ġgrab bing", - "Ġcryst als", - "ĠFour th", - "Ġcand les", - "ĠÑģлÑĥÑĩ а", - "ric o", - "Ġ5 000", - "et to", - "Ġund o", - "Ġk to", - "Ġdi vert", - "Ġch ir", - "Ġper sec", - "Ġh iking", - "Ġannounce ments", - "çĶ ±", - "з Ñĭ", - "Ġa uc", - "Ġsystem ic", - "ĠR M", - "Ïĥ α", - "ĠÐĶ ж", - "Ġy ar", - "ĠW ard", - "Ġpiss ed", - "Ġcar n", - "Ġautonom ous", - "ãħİ ãħİ", - "so ver", - "æ²Ĵ éĮ¯", - "å¾Ī 好", - "Ġref lex", - "Ġgard ens", - "Ġd ated", - "ì ±", - "ami ÄĻ", - "Ġcontinu ity", - "Ġcitizens hip", - "Ġsch wer", - "Ġz ak", - "t able", - "ĠÑģ Ñĩ", - "è§ ģ", - "ĠÏĥ ε", - "Ġgener ates", - "구ë Ĥĺ", - "ö h", - "ó m", - "al am", - "ĠJUD Y", - "ĠB ug", - "Ġãģ ¦", - "Ġdr ones", - 
"Ġá gua", - "ac aks", - "æ ļ", - "ĠÐļ он", - "× ĸ×Ķ", - "Ġstri ve", - "ĠAl tern", - "Ġne arest", - "Ġpro yect", - "ter a", - "ĠASH LEY", - "Ġwor m", - "Ġre play", - "Ġt ara", - "ĠInd ians", - "ãĤ °", - "ica id", - "ĠìĪ ľ", - "Ġappe aling", - "ĠW es", - "Ġment ions", - "Ġдел е", - "Ġk w", - "Ġfrag ile", - "is z", - "k ów", - "h ang", - "col or", - "Ġpresident e", - "8 7", - "е ÑĦ", - "çĪ ¸", - "Ġдоб ав", - "ĠN elson", - "á fic", - "ĠMIC HAEL", - "Ġmechan ic", - "Ġmet res", - "Ġo czywiÅĽcie", - "ĠC ind", - "Ġog sÃ¥", - "Ġlands ca", - "AC E", - "Ġhead lines", - "Ġcat alyst", - "ĠC atch", - "ink les", - "Ġp ills", - "ord o", - "Ġimmig rant", - "Ġexam ination", - "Ġacc idents", - "zÄħ d", - "Ġqui ere", - "Ġne lla", - "Ġ6 7", - "Ġpass a", - "Ġsuper fic", - "ist or", - "Ġno v", - "ëĭ µ", - "Ġmand ate", - "is ons", - "ĠVirt ual", - "Ġsel ber", - "Ġcounsel ing", - "ĠN BA", - "Ġse pt", - "Ġbelie ver", - "Ġmar vel", - "ĠInte gr", - "Ġм Ñĸ", - "Ġor ph", - "Ġback ward", - "ĠGen eration", - "ĠP ict", - "ĠÑĤо ÑĤ", - "Ġtap i", - "pro chen", - "Ġhall way", - "ht e", - "ĠÛģ ÛĴ", - "ĠZ um", - "èĢģ 師", - "ach ment", - "iqu er", - "fol g", - "ĠEd die", - "ĠK il", - "Ġwell ness", - "st ock", - "è¼ ĥ", - "Ġka ç", - "Ġterror ism", - "Ġpo inter", - "O f", - "her ic", - "ĠUlt imately", - "Ġmes es", - "ĠTr ade", - "Ġp int", - "Ġtu ition", - "Ġdisag re", - "Ġê²Į ìŀĦ", - "Ġmanus cript", - "Ġro omm", - "Ġoutput s", - "е ÑĨи", - "Ġr ies", - "Ġsal ud", - "otz dem", - "Ġmass es", - "Ġby ÅĤa", - "Ġclear ing", - "Ġdisc ourse", - "ats on", - "Ġfold ed", - "ĠJ ar", - "ÙĦ Ùī", - "9 00", - "ĠÑĥ Ñģп", - "Ġprophe cy", - "Ġinterf ere", - "иÑħ од", - "๠Į", - "Ġth ri", - "Ġ×ŀ× ©", - "Ġlaz ım", - "Ġ199 2", - "Ġfut uro", - "Ġlock ing", - "Ġembar go", - "ĠNe ither", - "iv amente", - "ĠmÃ¥ ste", - "Ġm ik", - "Ġcollect or", - "еко ÑĤоÑĢ", - "ĠG and", - "Ġsent ir", - "ĠM ight", - "å¡ Ķ", - "Ġgan zen", - "U C", - "Ġrel ating", - "S D", - "Ġmos quito", - "G R", - "Ġho llow", - "âĺ ħ", - "ĠWalk er", - "Ġaffili ate", - "Ġduplic ate", - "н ем", - "Ġgra pe", - "ĠOrgan ization", - "Ġsy nt", - "J oe", - "Ġg eg", - "Ġreve aling", - "ĠEth an", - "out er", - "Ġy ay", - "é« Ķ", - "л аÑĢ", - "Ġreported ly", - "Ġihr er", - "Ġrecogn ise", - "Ġbum per", - "ĠR andy", - "ĠVen us", - "t les", - "Ġappet ite", - "Ġgluc ose", - "Ġch odzi", - "ĠFurther more", - "t ir", - "Ġcont a", - "Ġint uition", - "Ġalt itude", - "Ġch unks", - "ĠJosh ua", - "ıģ ım", - "ry lic", - "le ans", - "ĠíĶ ¼ë", - "L L", - "Q ue", - "Ġg or", - "Ġзна ÑĩиÑĤ", - "Ġpo ems", - "Ġexc el", - "Ġexpl ored", - "Ġpop ul", - "Ġinclus o", - "st ä", - "ĠG avin", - "all ing", - "ĠÏĦο ν", - "é ©", - "ar beit", - "ĠG as", - "Ġgl orious", - "rie ben", - "Ġsp am", - "Ġindo or", - "Ġthr ust", - "ĠA ld", - "ĠPri or", - "Ġon board", - "ãģł ãģķãģĦ", - "o ca", - "AS H", - "£ ł", - "ĠChrist ine", - "Ġdra wer", - "Ġno on", - "Ġìŀ ĺë", - "Ġperman ently", - "æ· ±", - "ĠнапÑĢ имеÑĢ", - "Ġpodcast s", - "era peut", - "pr it", - "Ġstain less", - "ĠÚ© ÛĴ", - "Ġfamil ia", - "ĠÑĢаз ÑĢ", - "un to", - "ĠÑģÑĤ ол", - "Ġh ä", - "ĠH ai", - "ĠP B", - "iz on", - "Ġkon nte", - "Ġbüy ük", - "Ġutil izar", - "Ú Ĩ", - "Ġaqu esta", - "Ġmix er", - "ud ent", - "лек Ñģ", - "ÅĤ u", - "ĠÑģиÑģÑĤ ем", - "Ġн оÑĢм", - "Ġfat al", - "Ġconsider ations", - "Ġvalid ation", - "Ġo li", - "Ġk ardeÅŁ", - "ĠGL ORIA", - "Ġp all", - "еÑģÑĤ е", - "Ġrect ang", - "Ġmed ieval", - "allah i", - "ast i", - "ĠSy rian", - "Ġshe ar", - "Ġdeb ug", - "ĠM ai", - "Ġknock ing", - "ĠLe x", - "ard an", - "ro v", - "Ġmem orial", - "æ° £", - "ook y", - "Ġstuff ed", - 
"Ġpass é", - "Ġw ig", - "Ĥ ł", - "Ġpróxim a", - "Ġ199 1", - "Ġм еждÑĥ", - "Ġnuest ros", - "ĠBe ast", - "Ġsm o", - "atch ed", - "olog ia", - "Ġм од", - "Ġge e", - "Ġconcept ual", - "Ġà ´", - "Ġdecre ases", - "Ġquer ies", - "олÑĮ ÑĪ", - "ĠA part", - "Ġex empl", - "å± ±", - "Ġfl ed", - "ĠO FF", - "gg ak", - "Ġbe ad", - "h ir", - "l ies", - "ĠClear ly", - "ı lar", - "Ġch ess", - "Ġwhich ever", - "Ġ9 6", - "Ạ±", - "Ġrespect s", - "Ġм оÑĢ", - "Ġorgan ism", - "Ġgrand pa", - "ĠV ie", - "è·Ł ä½ł", - "Ġflo oding", - "Ġupgrad ed", - "Ñij ÑĢ", - "Ġcheek s", - "Ġcon quer", - "Ġstub born", - "Ġpuzz les", - "Ġau ction", - "Ġre lying", - "ĠPRO F", - "ĠEs per", - "ĠÐľ У", - "Ġhy pe", - "Ġposs ibil", - "Ġimp rison", - "ĠEr n", - "ìĹĪ ìĬµëĭĪëĭ¤", - "Ġenv ie", - "Ġresur rection", - "ä¸į è¡Į", - "Ġs per", - "ĠVenez uela", - "s om", - "Ġìŀł ê¹", - "Ġnouve lle", - "Ġclos es", - "Ġ19 40", - "Ġqu a", - "ĠJ ared", - "ĠP ir", - "Ġind e", - "Ġscr ub", - "uk u", - "Ġrequ iring", - "Ġв ами", - "Ġconsider able", - "åIJ Ľ", - "il ia", - "Ġin ne", - "Ġmein em", - "Ġhard ship", - "Ġtra ps", - "ro c", - "ĠìĦ ¤ë", - "Ġresearch ing", - "ĠMarg aret", - "Ġpen ny", - "Ġbı rak", - "Ñij л", - "Ġw ool", - "Ġr het", - "Ġflat ten", - "ç ĩ", - "à¹Ģภ£", - "Ġp ied", - "ĠCh ap", - "Ġunder m", - "Ġf ret", - "Ġcrash ed", - "ĠFra uen", - "Ø° Ùĩ", - "iv an", - "Ġliter ary", - "late go", - "Ġsp äter", - "Ġsimilar ities", - "â Ĩ", - "ĠCor on", - "ĠC reek", - "Ġboss es", - "Ġaccompan ied", - "Ġdeb ates", - "Ġassemb led", - "Ġà ģ", - "ĠV ai", - "Ġtr act", - "Ġsimple ment", - "ĠAr in", - "Ġvulner ability", - "Ġhorm one", - "I EL", - "OO K", - "Ġrel ay", - "ĠAnd rea", - "r il", - "Ġnecess ity", - "aceut ical", - "Ñİ Ñī", - "ous ing", - "nah men", - "Ġfoot print", - "m ap", - "ĠT ier", - "ann ya", - "int end", - "åĸ ®", - "å ¢", - "Ġdecor ate", - "Ġzomb ies", - "ĠHy d", - "ĠSu z", - "Ġcampus es", - "ĠE mb", - "Ġthr ottle", - "Ġad min", - "Ġop ortun", - "Ġmir rors", - "Ġident ities", - "ĠCl in", - "Ġë¹ Ħë", - "á¹ £", - "ĠO tt", - "Ġbl ues", - "Ġimpress ions", - "- ,", - "Ġv ague", - "a fe", - "Ġinfer ior", - "eral d", - "Ġmedic ines", - "Ġpre gunta", - "os ely", - "Ġt élé", - "ĠMon th", - "ĠLe aders", - "ĠEgypt ian", - "Ġr ation", - "k ers", - "he its", - "Ġre cht", - "P lay", - "Ġe g", - "Ġpoll s", - "ĠWOO DR", - "Ġsl ots", - "j am", - "B oth", - "ĠR at", - "ÑĢ аж", - "ĠBr ight", - "ä¸Ģ å®ļ", - "á»ij i", - "ur ious", - "Ġsing ers", - "Ġlo gin", - "Ġt êm", - "l ation", - "ĠM um", - "Æ°á»Ŀ ng", - "ĠEd itor", - "åIJ ij", - "Ġinnov ations", - "h ave", - "ĠS ek", - "Ġwe aker", - "ĠG ob", - "A fter", - "´ì §Ģ", - "Ġ문 ìłľ", - "ãĥ¼ ãĥ¼", - "Ġdisad vantage", - "ç¢ º", - "Ġg aze", - "ĠM ack", - "Ïģ ί", - "ĠK iss", - "ĠH olo", - "ĠBir th", - "iz i", - "b ab", - "ä¿ Ŀ", - "ìĭľ ê³ł", - "д еÑĢж", - "Ġsqu at", - "кÑĥ Ñģ", - "un i", - "ĠComm e", - "ĠWOODR UFF", - "ĠChampions hip", - "Ġwel che", - "ĠY outh", - "z em", - "Ġod pow", - "Ġpersist ent", - "r ut", - "ìĶ ©", - "íĸ ¥", - "la ir", - "ik u", - "Ġvend or", - "Ġch úng", - "Ġfinan ci", - "Ġover ly", - "â u", - "Ġgl uten", - "Ġ18 00", - "Ġdiv isions", - "Ġciud ad", - "Ġob ed", - "Ġwar um", - "Ġe her", - "Ġel im", - "ĠÐĴ о", - "Ġpeu vent", - "ĠW anna", - "Ġattend ance", - "Ġassess ments", - "ĠB og", - "Ġimag ery", - "Ġcollect ively", - "Ġinform al", - "ĠSch we", - "Ġde utlich", - "ĠCh el", - "ĠP E", - "ow ed", - "Ġb anner", - "Ġshel ves", - "ĠRet urn", - "æĭ ¿", - "LAUGH S", - "Ġcongrat ulate", - "ĠNor way", - "Ġd well", - "ĠCarib bean", - "Ġnorm s", - "ĠAn imal", - "ĠValent ine", - "Ġext ending", - "ĠV ou", - 
"or r", - "ĠCh eng", - " ¡", - "ĠдоÑĢ ог", - "Ġve g", - "Ġh Ã¥", - "ĠX in", - "Ġì¹ ´ë", - "em et", - "Ġhyp oth", - "Ġinteress ante", - "ric es", - "I Z", - "ĠUS D", - "Ġrun ner", - "ĠB ag", - "Ġê ½", - "Ġcomeç ar", - "Ġpig s", - "Ġweakness es", - "P h", - "ĠVi ol", - "ä¸į çĶ¨", - "Ġdra gging", - "ĠAqu ÃŃ", - "ĠCS S", - "Ġmill imeters", - "Ġest ás", - "Ġac ute", - "Ġde jar", - "i ÄŁ", - "ob ra", - "L ove", - "Ġsil k", - "** **", - "Ġjo ins", - "Ġpro l", - "Ġê°IJìĤ¬ íķ©ëĭĪëĭ¤", - "æĶ ¯", - "ØŃ Ø¯", - "agh etti", - "än ner", - "Ġstr ang", - "Ġdoub led", - "Ġdescri ptions", - "Ġst ellen", - "Ġpart i", - "ç« ĭ", - "² Ħë", - "Ġö ÄŁ", - "ig hing", - "Ġang ular", - "Ġnat uur", - "ĠSh el", - "Æ° Æ¡", - "Ġr ays", - "Ġse per", - "st art", - "v ised", - "Ġrush ed", - "Ġinternation ally", - "Ġnive l", - "Ġbox ing", - "fall en", - "á»ij c", - "Ġse inen", - "plic ity", - "Ġcarb oh", - "ĠTra vis", - "us o", - "ĠPh ase", - "Ġactiv ation", - "Ġop io", - "· ¨", - "Ġdecre ased", - "C ar", - "Ġbund le", - "Ġexp end", - "orm al", - "Ġadjac ent", - "Ġme e", - "ĠоÑĢ г", - "Ġtrans cript", - "ĠLang uage", - "G S", - "è§ ī", - "Ġse ul", - "Ãł nh", - "Ġn ya", - "ning s", - "Ġìĭ ľë", - "ĠëĶ°ë Ŀ¼", - "ĠA gr", - "ÃŃ d", - "çķ Ļ", - "Ġab y", - "ĠNe o", - "ıyor uz", - "ĠThink ing", - "a ime", - "Ġv ite", - "Ġtrav és", - "Ġ×ij× ¢", - "Ġм ед", - "O ur", - "ho ot", - "Ġl iner", - "ĠP izza", - "Ġhy g", - "fl ies", - "ĠContin ue", - "Ġdent al", - "ĠT ib", - "Ġreg ulate", - "lie ÃŁ", - "AL K", - "ĠTa e", - "ê¸ ¸", - "ĠBre xit", - "ĠG ut", - "Ġoccup ation", - "Ġz robi", - "â m", - "Ġwh isk", - "ä¸ĸ çķĮ", - "Ġkans ke", - "om on", - "ro be", - "Ġwar fare", - "Ġth á»ĥ", - "Ġjak i", - "Ġstro kes", - "Ġpe as", - "ĠDam it", - "H AN", - "Ġinter ference", - "Ġмин ÑĥÑĤ", - "N ER", - "out ing", - "Ġtext ures", - "Ł ī", - "ow i", - "Ġíķ Ļ", - "Ġd ens", - "Ġprotagon ist", - "än n", - "Ġgod dess", - "Ġwoll te", - "ij o", - "ĠWo che", - "ĠV PN", - "st ory", - "Ġkind erg", - "Ġfun nel", - "Ġdist ress", - "ноÑģÑĤÑĮ Ñİ", - "Ġno isy", - "ĠпÑĢод олж", - "Ġdar an", - "Ġenzy me", - "л ож", - "Ġm ute", - "Ġd war", - "Ġا س", - "Ġkom pl", - "Ġmer it", - "Ġf osse", - "ĠDr ink", - "Ġfor a", - "Ġw ohl", - "Ġbree ze", - "Ġsan it", - "Ġdr in", - "ĠìĿ´ê±° ëĬĶ", - "Ġ6 2", - "Ġì° ¨ë", - "aby tes", - "Ġde eds", - "ĠÐ ¹", - "i ème", - "igg ling", - "Ġ\" '", - "ĠÑĩа ÑģÑĤÑĮ", - "ĠAns wer", - "Ġev angel", - "Ġ10 80", - "ĠVis it", - "ic ient", - "Ġreli ability", - "Ñİ ÑģÑĮ", - "ĠEar lier", - "Ġf id", - "çŃī ä¸Ģä¸ĭ", - "Ġslee ves", - "iy orsun", - "Ġb ib", - "ĠAcc ount", - "Ñı ли", - "cipl inary", - "z as", - "Ġб еÑĢ", - "Ġneck lace", - "Ġbl ender", - "ĠPhill ips", - "et i", - "ĠJup iter", - "Ġprov oc", - "ĠYe ars", - "ent re", - "ac io", - "Ġk ü", - "Ġanten na", - "Ġnovel s", - "Ġf art", - "ĠS ugar", - "ĠJud y", - "Ġcollaps ed", - "ç °", - "rit is", - "Ġìĥģ íĻ©", - "ÐĹ Ð«", - "ĠVer f", - "rane an", - "ere um", - "ĠTar get", - "Ġ8 8", - "ĠÐĺ з", - "ide o", - "Ġreg ression", - "ì¶ ľ", - "Ġmów i", - "Ġstud ios", - "i ens", - "ip h", - "Ġfr ying", - "Ġfasc inated", - "ĠW ah", - "b ucks", - "m aya", - "ĠSat urn", - "ĠM ommy", - "Ġrating s", - "Ġaut umn", - "Æ°Æ¡ ng", - "Ġlos er", - "Ġcent ro", - "érie ur", - "ĠF old", - "Ġsuper visor", - "ĠNo bel", - "Ġunder est", - "ob ia", - "Ġв ÑģÑı", - "Ġver w", - "Ġfu els", - "Ġartif acts", - "Ġë¶ Ļ", - "ĠAut om", - "çļĦ æĺ¯", - "Û Ķ", - "×ķ× ¡", - "Ġih nen", - "Ġ5 9", - "ound ing", - "еÑĢ Ñĭ", - "in ars", - "ch ant", - "Ġadd icted", - "Ġexplos ive", - "Ġdisp ers", - "â ĸĪ", - "ax is", - "AR Y", - "Ġl um", - "ĠÑĥ Ñģл", - "ĠØ Į", - 
"Ġru pees", - "ĠPe arl", - "c amp", - "t v", - "oy a", - "Ġconclud es", - "Ġcoll ision", - "Ġbuy er", - "Ġplay ground", - "Ġspr ings", - "Ġfemin ine", - "ĠR as", - "Ġincar cer", - "íĹ ĺ", - "Ġdial ect", - "Ġclos ure", - "Ġchat ting", - "Ġb abe", - "Ġspot light", - "Ġnot ation", - "è· ¯", - "St ar", - "i ão", - "Ġt ête", - "Ġt ide", - "Ġjun to", - "Ġsen ator", - "Ð ¥", - "Ġexcus es", - "Ġbl ink", - "Ġadm ission", - "ĠL ily", - "Ñĭ ми", - "Ġam igo", - "Ġl ust", - "ëĭ ¬", - "Ġam ino", - "äºĭ æĥħ", - "Ġconsult ant", - "ĠElect ric", - "Ġëħ¸ë ŀĺ", - "uj ah", - "Ġshoot er", - "icht en", - "ĠUkrain ian", - "Ġaim s", - "ĠEnter tain", - "Ġmir acles", - "èŃ °", - "Ġze igen", - "Ġl am", - "Ġres s", - "ĠJ ill", - "yl an", - "Ġro ok", - "Ġh aya", - "Ġpass port", - "ad ata", - "Ġju icy", - "con f", - "л ей", - "ĠS z", - "Ġinter cept", - "ãģĤãĤĬãģĮãģ¨ãģĨ ãģĶãģĸ", - "ĠTe ams", - "Ġmak en", - "ir rel", - "ĠLI KE", - "áºŃ y", - "êµ °", - "Ġshort age", - "Ġparad igm", - "Ġpap el", - "Ġast ero", - "ãģ¾ ãģŁ", - "Ġsoll en", - "ĠMic key", - "ĠOr leans", - "Ġchol esterol", - "Ġgo ose", - "ÑĨи Ñİ", - "ãģĤ ãĤĭ", - "ĠF L", - "Ġгол ов", - "Ġtrib ute", - "ĠG am", - "Ġé videmment", - "Ñı Ñħ", - "å® ŀ", - "çĶ °", - "Ġin appropri", - "uh an", - "Ġorganiz ational", - "ail ed", - "Ġend ure", - "Ġ7 6", - "Ġshot gun", - "Ġliv re", - "Ġsu ited", - "Ġwarm th", - "ĠS IM", - "Ġenv ision", - "Ġde grad", - "î ne", - "La ughing", - "ĠWho ever", - "ĠBuddh ism", - "Ġspr inkle", - "ceÄŁ iz", - "Ġru ins", - "Ġst arch", - "ĠHer z", - "Ġinjust ice", - "Ġhum idity", - "ожал Ñĥй", - "ĠOb ject", - "ĠI gn", - "ĠEx am", - "ig ers", - "Ġth ou", - "ĠSo y", - "iv as", - "Ġpol es", - "m ath", - "Ġв ним", - "ING ING", - "ed ral", - "Ġexpl or", - "Ġroast ed", - "Ġcraw l", - "Ġco ff", - "Ġan om", - "Ġw ij", - "Ġimpro ves", - "Ġtreat y", - "Ġdiscover ing", - "Ġstat ute", - "Ġmerc ado", - "ĠÑģ ил", - "Ġint el", - "ĠChance llor", - "ĠMed icaid", - "ug i", - "Ġver bal", - "Ġd ön", - "Ġscript ure", - "Ġit eration", - "ek s", - "ĠOx ford", - "Ġw äh", - "ĠV ad", - "ĠA K", - "ĠìķĦ ìĿ´ë", - "Ġi ets", - "Ġneed les", - "Ùĥ Ùħ", - "Ġpas ado", - "Ġalbum s", - "Ġye a", - "et zen", - "Ħë ıĦ", - "Ġdeterm ines", - "Ġthe e", - "ĠPlay ing", - "är t", - "Ġ× ¦", - "c led", - "Ġdown ward", - "al one", - "Ġsol u", - "Ġpart ition", - "Ġw z", - "d d", - "Ġpesso al", - "å ª½", - "Ġfact ories", - "Ġble ibt", - "ม า", - "als a", - "ĠNF L", - "Ġfu era", - "Ġres erved", - "ĠE arn", - "Ġhel t", - "Ġshort cut", - "Ġconvin cing", - "sp ace", - "Ġen force", - "Ġc ores", - "Ġe fter", - "Ġrecess ion", - "x ico", - "Ġprop osition", - "ar ians", - "rop ol", - "Ġëª °ë", - "ĠÎ ľ", - "ĠìļĶ ì¦ĺ", - "Ġactiv ist", - "Ġconv iction", - "Ġz ab", - "Ġcancel ed", - "ÑĤо Ñĩно", - "ĠÎ ®", - "éĢĻ樣 åŃIJ", - "n ite", - "Ġfund ra", - "buz zer", - "ел о", - "ic ations", - "Ġz ona", - "Ġte ens", - "Ġmethod ology", - "Ġì¤ij ìļĶ", - "th an", - "ĠU l", - "ĠG rey", - "Ġh og", - "IN K", - "ĠS ung", - "ĠC laud", - "ĠCN N", - "Ġdel ivers", - "al in", - "ĠAd obe", - "ot he", - "ĠDes wegen", - "ภ³", - "Ġwer de", - "Ġgre ase", - "Ġup grades", - "ĠFin land", - "ac cept", - "Ġinter rog", - "be e", - "Ġãģ «", - "Ġpre de", - "ĠN ep", - "ĠCam bridge", - "Ġgraph s", - "Ġha unted", - "Ñģ ем", - "æ §", - "åħ ĭ", - "S ome", - "ĠM all", - "Ġrehears al", - "ĠUr ban", - "ĠL ag", - "Ġn im", - "ê° ķ", - "Ġposition ed", - "Ġavo ided", - "EM A", - "Ġlleg ar", - "Ġráp ido", - "Ġgou vern", - "Ġh ing", - "Ġdeal er", - "Ġreform s", - "Ġfat ty", - "к ол", - "ĠA ce", - "Ġne p", - "Ġì² Ń", - "Ġcomput ation", - "ĠSt ream", - "bour ne", - 
"t ur", - "P or", - "Ġsleep y", - "Ġbang et", - "ãģĤ ãģ®", - "Ġwe ighs", - "Ġble iben", - "ĠG ren", - "Ġun ions", - "Ġêµ IJ", - "Ġap render", - "uit ar", - "ĠJ est", - "um ing", - "ĠPlay er", - "ĠExt rem", - "Ġinteg er", - "аÑĩ е", - "Ġconcert s", - "×ķ× Ľ", - "Ġtro chÄĻ", - "ĠRe pe", - "éĩį è¦ģ", - "๠Ĥ", - "ż en", - "Ġsound ing", - "Ġan onymous", - "Ġex ca", - "ĠIran ian", - "Ġener getic", - "Ġw ives", - "ĠÑĨ веÑĤ", - "Ġa is", - "ãģĭ ãģª", - "Ġsud ah", - "Ġunder wear", - "Ġcrunch y", - "ĠP ain", - "Ġger çek", - "red ict", - "Ġm isma", - "Ñĸ ÑĤ", - "Ġsurv iving", - "ÎŃ ÏĤ", - "Ġparticip ant", - "ĠH essen", - "ári as", - "Ġsub way", - "ist ä", - "Ġcor al", - "Ġmar ijuana", - "ĠMem orial", - "ÑĪ ий", - "ri z", - "Ġsatell ites", - "Ġle ase", - "ĠCam eron", - "um ph", - "Ġclass mates", - "äh än", - "ÑģÑĤв е", - "Ġh ue", - "ĵ¤ ìĿĦ", - "Ġproport ional", - "Ġn oss", - "Ġl aps", - "r Ã¥", - "Ġbit coin", - "ÐĹЫ ÐļÐIJ", - "Ġì¶ ©", - "ĠÙĦ ÙĦ", - "ĠM ort", - "ĠEs p", - "arn os", - "ĠÑģказ ал", - "Ġä nd", - "åħ Ħ", - "×Ļ ×Ļ×Ŀ", - "ĠGe b", - "ge hen", - "I naudible", - "bor ough", - "ÑĦ ÑĦ", - "Ġfellow ship", - "ĠP aper", - "Ġcur ved", - "ĠGE OR", - "Ġcalcul ator", - "ĠCat al", - "ĠvÃł o", - "Ġby pass", - "л еÑĤ", - "à ³", - "tr ans", - "ren cies", - "ì ¡Į", - "ig ent", - "Ġtast ed", - "Ġo ceans", - "u ft", - "erv ice", - "ĠÐľÐ£ ÐĹЫÐļÐIJ", - "ĠClass ic", - "Ġrespect ively", - "~ )", - "î tre", - "ĠN ash", - "Ġz it", - "ĠìĽ ĥ", - "ĠëĨ Ĵ", - "qu ote", - "ĠUn s", - "Ġt ac", - "Ġpro ves", - "ĠPort land", - "b ly", - "Ġ ere", - "ì¶ Ķ", - "Ġépo ca", - "ĠÑĤÑĭ ÑģÑıÑĩ", - "7 6", - "Ġhad e", - "ĠF ro", - "ĠpolÃŃt ica", - "t ag", - "Ġíķ Ń", - "Ġsch ö", - "are tt", - "Ġprov isions", - "Ġmot ors", - "Ġimag ing", - "Ġdo k", - "ul ously", - "Ġme ille", - "çİ° åľ¨", - "ë IJ", - "ĠIS O", - "ĠST EM", - "ĠBow l", - "Ġto wers", - "ĠE e", - "ĠPerform ance", - "Ġlo in", - "cuss ion", - "Ġcoast al", - "ial e", - "com pass", - "Ġspell s", - "Ġdisappoint ing", - "Ġë²Ī 째", - "E ER", - "Ġvers atile", - "as ury", - "Ġen fin", - "Ġdown side", - "Ġgu iding", - "ĠاÙĦ ÙĤ", - "Ġnin ety", - "char ged", - "ĠF ans", - "Ġphilosoph ical", - "Ġg arn", - "ĠmÃ¥ nga", - "Ġwilling ness", - "Ġport ions", - "ab en", - "Ġ ï", - " ¿", - "ra ul", - "Ġspr int", - "if en", - "ıy la", - "Ġк Ñĥп", - "ãģı ãģłãģķãģĦ", - "Ġens uite", - "ĠCap itol", - "Ġ6 3", - "ĠговоÑĢ иÑĤ", - "Ġappoint ments", - "æī ¾", - "omi ast", - "Ġcare g", - "Ġpubl isher", - "Ġher aus", - "Ġε ί", - "ĠV S", - "ãģĿ ãģĹãģ¦", - "ä¸Ń åħ±", - "Ġsacrific es", - "th ird", - "Ġhuman itarian", - "ĠëĤ ´ì", - "im on", - "Ġine qu", - "Ġz ob", - "Ġcomfort ably", - "ĠD inge", - "Ġcancell ed", - "ĠPS AKI", - "ĠRob inson", - "Ġfin s", - ") ?", - "ĠHist or", - "ĠÑĩеловек а", - "Ġt bsp", - "te xt", - "k im", - "Ġupd ating", - "Ġgel d", - "f eld", - "ı ¼", - "Ġm ä", - "Ġcaf é", - "Ö Ģ", - "ĠS ri", - "ĠReg ion", - "ĠH ahaha", - "Ġfin ances", - "ĠاÙĦØ ´", - "Ġb unk", - "ru k", - "ha ft", - "Ġlater al", - "Ġext ensions", - "ĠìķĦ ìĿ´", - "Ġdefin ite", - "ĠZ hao", - "ĠLu is", - "st y", - "Ġcas os", - "ĠK lim", - "Ġ199 3", - "Ġreal ization", - "Ġhistor ian", - "Ġcrack ed", - "ëĤ ´", - "Ġsyst ème", - "ĠC IA", - "ĠÑĤ во", - "osp heric", - "Ġfle e", - "Ġr ất", - "ĠRegard less", - "Ġrel uct", - "Ġtim ely", - "ĠJul ian", - "G M", - "é Ĵ", - "ad ura", - "é£ Ł", - "Ġdress es", - "çģ £", - "ĠëĶ Ķ", - "Ġnom inated", - "Ġadvoc ates", - "ym ph", - "Ġrecord ings", - "Ġdev iation", - "Ġpriorit ize", - "Ġspir al", - "ĠYOU R", - "Ġtransp ose", - "amp oo", - "ĠìĽIJë ŀĺ", - "ĠV ision", - "Ġpol ite", - "Ġha mb", - "ĠPat ient", 
- "æ¯Ķ è¼ĥ", - "íģ ¬ë", - "Ġs ia", - "Ġê³ ³", - "Ġž e", - "è§ Ģ", - "Ġsuper market", - "ë ¹", - "ĠS ierra", - "Ġgr illed", - "ĠUp on", - "Ġabs ent", - "Ġme c", - "ĠAp ollo", - "Ġp unk", - "ĠPa ÅĦst", - "ĠÑģв ой", - "Ġê±° 기", - "G irl", - "Ġskin ny", - "ĠPrem ier", - "Ġterrit ories", - "Ġli ability", - "Ġj erk", - "r atic", - "Ġdan cers", - "ĠÑĥ ÑĢов", - "Ġê´ Ģë", - "on ly", - "ĠSt u", - "Ġske leton", - "ĠëŃ IJë", - "Ġзак он", - "ı kt", - "ĠMI KE", - "Ġl ö", - "m ie", - "Ġre iter", - "ãģĵãĤĮ ãģ¯", - "ĠKoll eg", - "ĠAd ams", - "lich er", - "Ġçoc uk", - "Ñı г", - "Ġbl ush", - "Ġsun shine", - "Ġe z", - "ĠDev il", - "Ġê¸ ¸", - "Ġãģ Ĭ", - "ad d", - "Ġlic ensed", - "Ġv inyl", - "ĠC zech", - "im ag", - "Ġcrack ing", - "Ġì º", - "Ġud ah", - "Ġs ommes", - "Ġìĸ¼ êµ", - "wa Äĩ", - "Ġf res", - "åij ½", - "ĠWal mart", - "ĠТ епеÑĢÑĮ", - "at isf", - "C I", - "l ang", - "Ġdiff usion", - "çĶ ·", - "Ġsom os", - "ĠM akes", - "æĪij æĥ³", - "ĠRick y", - "Ġmuch a", - "íķ ¨", - "Ġhorse power", - "as ia", - "Ġfib ers", - "Ġ erm", - "Ñģ кие", - "Ġjest e", - "Ġfire fight", - "Ġcu isine", - "Ġbesond ers", - "d ig", - "Ġì¢ ħ", - "ĠÑĥ ж", - "Ġtr acing", - "Ġcertain s", - "ĠApp ly", - "Ñĭв аÑĤÑĮ", - "ç Į", - "Ġbr u", - "ĠY ES", - "ĠB ai", - "ĠD it", - "ĠB is", - "Ġun le", - "ÑģÑĤа ÑĤоÑĩно", - "ĠAw ak", - ".. \"", - "Ġ12 5", - "Ġroot ed", - "Ġcaut ious", - "con st", - "Ġorchest ra", - "çľ ¼", - "Ġвн ÑĥÑĤ", - "Ġquel qu", - "ĠоÑĤ веÑĤ", - "ĠMet hod", - "ì¹ ľ", - "Ġμ αÏĤ", - "l ü", - "ĠìķĦ ê¹Į", - "Ġn aming", - "C har", - "ĠS icher", - "Ġprivile ged", - "ĠF ly", - "Ġãģ ĭ", - "áºŃ t", - "Ġadv ances", - "ĠZel da", - "Ġand ra", - "Ġgr inding", - "ĠEd ition", - "p f", - "Ġwarri ors", - "Ġh edge", - "Ġuns eren", - "ĠÑģÑİ Ð´Ð°", - "el iness", - "Ġpersonal ities", - "Ġf ö", - "' M", - "ĠÑĤо Ñĩно", - "Ġsh ipped", - "Ġmete or", - "Ġsurround ings", - "ĠF ill", - "u esta", - "ĠPerson al", - "ĠAll e", - "OR T", - "ä¹ ħ", - "ĠS che", - "V I", - "Ġcompar able", - "dam n", - "Ġd itch", - "Y AN", - "ism us", - "Ġpick up", - "Ġd ak", - "ĠE P", - "b est", - "ĠS ue", - "äll t", - "Ġpop corn", - "Ġfold ing", - "h ome", - "ив аеÑĤ", - "å·² ç¶ĵ", - "Ġan not", - "ch uck", - "Ġfier ce", - "Ġdam aging", - "Ġfl op", - "Ġpas ar", - "Ġre ef", - "ĠÑģво ей", - "Ġz oo", - "o vers", - "j ets", - "Ġpr ès", - "ĠSil icon", - "te ok", - "ĠS eth", - "at amente", - "Ġtransm itted", - "Ġrepl icate", - "Ġsl im", - "ĠC ream", - "æĦŁ ãģĺ", - "Ġside walk", - "ìĪ ĺë", - "Ġжиз нÑĮ", - "ĠMon ica", - "ä¾Ĩ äºĨ", - "Ġcop ied", - "ĠTer ra", - "ist ent", - "ç³ »", - "Ġо но", - "Ġwh ale", - "ĠW ITH", - "л ÑĥÑĪ", - "å½± çīĩ", - "ĠE en", - "ĠÑģво и", - "Ġord in", - "Ġpl ural", - "Ġsp okes", - "Ġdisp ute", - "Ġsens ible", - "Ġpre aching", - "Ġktó rzy", - "pt ed", - "av ier", - "Ġpist ol", - "ĠTap i", - "Ġ ÅĤ", - "ff ff", - "Ġac rylic", - "Ġignor ance", - "ĠZ iel", - "r ans", - "Ġweld ing", - "m id", - "æĪij ä¸į", - "Ġзан им", - "Ġlan es", - "Ġmin es", - "Ġmom s", - "×ķ× Ĺ", - "ĠCham ber", - "t ier", - "Ġmod est", - "ĠìĹ¬ê¸° ìĦľ", - "Ġun as", - "Ġw rench", - "hand ed", - "Ġsatur ated", - "ĠF ang", - "ĠCommission er", - "ठ°", - "Ġ× ĸ", - "ĠLouis iana", - "ĠM ask", - "Ġcub es", - "ìĶ ¨", - "Ġvidé os", - "ĠnÃ¥ gon", - "Ġr ider", - "Ġì¶ ľ", - "Ġs ón", - "ĠLat ino", - "b ank", - "íķ´ì £¼", - "ĠB rend", - "Ġsexual ity", - "... 
,", - "Ġforget ting", - "Ġ ÛĮ", - "ĠAven gers", - "ĠBon jour", - "cess or", - "кÑĢа ÑĹ", - "c ence", - "Ġge ograph", - "cul o", - "о ÑģÑĤÑĮ", - "Ġswe ating", - "íĥ Ģ", - "Ġsymm etry", - "ts Ã¥", - "Ġj an", - "ĠFer r", - "é¦ ĸ", - "Ġamb assador", - "ziÄĻ k", - "Ġmus un", - "ĠÑĥ ÑĤ", - "ĠL G", - "iss ent", - "comm un", - "Ġcour s", - "Ġdevelop s", - "Ġbron ze", - "Ġsubst ances", - "dri ven", - "주 ìĦ¸ìļĶ", - "Ġa os", - "åĦ Ħ", - "ĠPROF ESS", - "h alf", - "Ġsort ed", - "ĠB omb", - "л аг", - "ĠMalays ia", - "ĠChrist ina", - "Ġteam mate", - "èģ ŀ", - "F T", - "Ġk ı", - "heart ed", - "+ +", - "ogen ic", - "Ġbell s", - "ĠOu ais", - "Ġspecial ists", - "б Ñĭ", - "dep th", - "lass es", - "g ies", - "ĠCo ffee", - "Ġmark ing", - "Ġfo ll", - "ul i", - "Ġad hesive", - "ĠB ot", - "ĠP unkt", - "e ye", - "ĠB ub", - "el ong", - "åĪ ¶", - "ĠпÑĢ ик", - "Ġdon or", - "8 4", - "Ġen for", - "Ġcatch es", - "Ġbr icks", - "Ġkn itting", - "ĠKnow ing", - "ok s", - "H Y", - "r ide", - "ĠFant asy", - "im an", - "Ġp se", - "Ġìĺ ¨", - "Ġв д", - "Ġrest ra", - "Ġevalu ated", - "ÑĢ ев", - "Ġfortun ately", - "Ġche gar", - "ر ب", - "Ġdom ains", - "ib i", - "ar ry", - "Ġshut ter", - "Ġfic ou", - "M ike", - "Ġinc lu", - "Ġdon ors", - "Ġa pl", - "ĠL ower", - "Ġimport ed", - "Ġacad emy", - "Ġfin als", - "Ġdisappe ars", - "ÙĬ ا", - "Ġadministr ator", - "j s", - "Ġcut ter", - "Ġr anging", - "ör per", - "Ġconstra int", - "ĠT able", - "ĠSh an", - "v ic", - "ĠF ix", - "ĠSw ift", - "oun ces", - "ĠWar um", - "Ġlett uce", - "app elle", - "Ġsh ave", - "Ġb ás", - "Ġ7 7", - "ĠO oo", - "a o", - "ĠMc M", - "ĠD rew", - "Ġl ump", - "Ġl ashes", - "schein lich", - "R ep", - "in is", - "ĠC ette", - "Ġcompos ite", - "emet ery", - "Ġsort e", - "ĠFin ancial", - "он е", - "ron es", - "ĠV oy", - "Ġt éc", - "ł ¹", - "ĠNin ja", - "ĠCor in", - "ен нÑı", - "ìĿ´ìĹ Ī", - "Ġn ich", - "Ġdetect ive", - "âĢ¦ \"", - "Ïĥ ε", - "Ŀ¼ë ıĦ", - "Ġë³ Ģ", - "Ġë¸ Ķë", - "Ġpro pe", - "ĠW right", - "Ġ×Ķ× ª", - "ĠSh i", - "Ġãģ Ł", - "Ġinvestig ations", - "éĤĦ æĺ¯", - "ĠPower Point", - "ĠCh u", - "Ġìĺ ¤í", - "ĠìĻĦ ìłĦ", - "ĠFra gen", - "un ning", - "Ġpour rait", - "Ġtext book", - "м Ñĭ", - "Ġf ahren", - "Ġ ÑĤоÑĢ", - "Ġl akes", - "ünd e", - "I nt", - "ĠMet ro", - "Ġmans ion", - "Ġа б", - "ĠZh ou", - "Ġcorrid or", - "Ġesc ol", - "Ġindic ating", - "ia ÅĤa", - "Ġm ommy", - "Ġarch ives", - "Ġfound ers", - "eng ine", - "ĠDie u", - "Ġsick ness", - "Ġë³´ ëĭĪê¹Į", - "Ġar b", - "Ġn ed", - "ĠCh op", - "Ġco vid", - "Ġsl am", - "Ġpublic ations", - "D C", - "Ġsp ends", - "æ ¾", - "Ġrefuge e", - "Ġd ile", - "Ġ×IJ× ĸ", - "ific ar", - "ĠS ach", - "G u", - "Ġre load", - "?? 
??", - "Ġje ÅĽli", - "ĠÑģ оÑģÑĤо", - "Ġsim plicity", - "Ġbull ying", - "Ġм ол", - "Ġreal idad", - "Ġuncle ar", - "app a", - "le vant", - "ĠIS IS", - "ĠW atson", - "Ġde in", - "ĠMic ro", - "íķ ľë", - "ü g", - "Ġdev am", - "Ġtwe eted", - "å° İ", - "Ġunderstand able", - "at an", - "Ġvers a", - "Ġpre ca", - "Ġv á»ģ", - "ĠCop y", - "ĠOr acle", - "Ġmindful ness", - "Ġdisc ret", - "ern en", - "ĠP le", - "H ave", - "Ġisol ate", - "Ġde u", - "Ġsevent y", - "ĠH ills", - "Ġarc ade", - "ĠÑģп еÑĨи", - "Ġsigu iente", - "ĠB ÃľNDNIS", - "lig a", - "ĠвÑģÑĤÑĢ еÑĩ", - "ô m", - "Ġtwe ets", - "Ġsch auen", - "Ġcrit ique", - "ĠðŁİ µ", - "Ġst att", - "ĠÑģам ое", - "ân cia", - "Ġsuper natural", - "Ġplug ged", - "F l", - "yn ı", - "ĠTamb ién", - "Ġencourage ment", - "ĠSer ver", - "ëĤ ľ", - "up a", - "Ġast on", - "Ġhe ars", - "ÑĢа Ñħ", - "Ġsch e", - "Ġr ats", - "Ġrec uper", - "Ġun ten", - "ĠFight ing", - "Ġacadem ics", - "ç¤ º", - "ĠS ü", - "Ñģ киÑħ", - "Ġpa ired", - "Ģ ìĿĦ", - "Ġá rea", - "Ġsweet ness", - "åı Ĭ", - "Ġde fer", - "Ġmuit as", - "ĠAud io", - "Ġlock er", - "ÙĬ د", - "ĠÑģÑĤ ав", - "Ġbu ena", - "AN S", - "Ġdetect or", - "av o", - "be k", - "Ġα ν", - "íİ ¸", - "Ġdra gged", - "Ġдолж ен", - "à ĸ", - "ر Ø©", - "ìĿ´ì §Ģ", - "Ġcell e", - "ck ing", - "ĠاÙĦØ ¬", - "ĠCan vas", - "Ġespa ñ", - "Ġgl imp", - "Ġspread s", - "ong o", - "ĠM ason", - "ĠIn g", - "Ġê°Ģ ëĬ¥", - "ÏĦ ικ", - "Ġsec ular", - "Ġb ater", - "Ġinqu iry", - "Ġenerg ies", - "Ġmanufact ured", - "Ġveget arian", - "Ġpine apple", - "ÑıÑĤ а", - "Ġpractition ers", - "2 000", - "Ġíķ´ì ļĶ", - "ĠìĹ¬ëŁ¬ë ¶Ħëĵ¤", - "Ġë¶ Īë", - "ĠJeff erson", - "ĠJo an", - "Ġtr am", - "å® ¹", - "ch mal", - "ĠH ait", - "á¹ ĩ", - "Ġun real", - "Ġsymbol ic", - "Ġste alth", - "Ġspl ash", - "ĠEntertain ment", - "Ġmetall ic", - "?\" .", - "è¶ Ĭ", - "ar ound", - "Ġdesp air", - "ĠNev ada", - "ĠFin ance", - "Ġk rie", - "ĠL ux", - "ĠSm ash", - "ke eping", - "Ġз аг", - "Ġnarc iss", - "Ġdz isiaj", - "Ġtoler ate", - "o ard", - "Ġlink ing", - "ĠEconom ic", - "Ġì ¼", - "Ġmor ph", - "ĠN ak", - "ĠB aker", - "at on", - "r ings", - "ĠP eng", - "ĠAir port", - "ãģĭ ãģ£ãģŁ", - "íķĺ ëĭ¤", - "§ ģ", - "pr ints", - "Ġhad i", - "Ġemp ir", - "ĠL ives", - "ann ers", - "Ġн им", - "ĠPROFESS OR", - "Ġpositive ly", - "ant om", - "Ġbad ge", - "ke lt", - "Ġinter fer", - "Ġfulf illing", - "Ġvisual ization", - "éĹľ ä¿Ĥ", - "ĠPr ice", - "� �", - "Ġscen ery", - "Ġpr one", - "Ġw izard", - "Ġb anyak", - "ver b", - "s ky", - "Ġwish ed", - "Ġrail way", - "Ġü zer", - "Ġalgu ien", - "ĠA W", - "Ġкол иÑĩе", - "Ġreact ing", - "ĠB uch", - "ภ¶", - "Ġan th", - "Ġsi h", - "Ġh ust", - "ĠSc reen", - "il ant", - "ah o", - "Ġfragr ance", - "Ġelev ation", - "ĠMed iter", - "Ġë ¿", - "Ġé qu", - "Ġwra ps", - "Ġin ert", - "Ġrecre ate", - "л аÑĤ", - "Ġbo leh", - "Ġharass ment", - "unk y", - "Ġglimp se", - "reg ierung", - "Ġfut ur", - "Ġreposit ory", - "Ġeng ra", - "Ġtraff icking", - "ass is", - "ĠTre k", - "Ġë² Į", - "Ġë§ Īë", - "ĠK ab", - "ani u", - "g ive", - "Ġdin osaurs", - "Ġfe ather", - "Ġatt itudes", - "Ġpl um", - "ĠR S", - "ĠAn fang", - "ill ery", - "ĠìĬ ¤", - "M Y", - "Ġtrze ba", - "Ġsk ies", - "ĠA j", - "ur able", - "C U", - "ĠSh ane", - "Ġdepart ure", - "ĠT ON", - "iet en", - "r ats", - "æ° Ĺ", - "is u", - "Ġb ord", - "Ġinteresting ly", - "çĻ »", - "oug hing", - "Ġr ushing", - "Ġvol atility", - "Ġp yt", - "Ġform ats", - "Ġз аÑĤ", - "Ġê¼ Ń", - "Ġwhat not", - "Ġcomp ort", - "s w", - "ore an", - "ĠRel ax", - "Ġcl an", - "ĠA H", - "Ġpe w", - "Ġdiction ary", - "T ake", - "sh irts", - "ĠH ugh", - "ĠعÙĦ ÙĬ", - "ĠP ic", - "Ġenroll ed", - 
"Ġjed nak", - "Ġoffer ings", - "Ġcor az", - "L ife", - "Ġ !!!", - "Ġcl er", - "ĠVide os", - "ĠRod rig", - "ĠId ent", - "ĠP os", - "ĠSt age", - "ĠR ace", - "Ġen act", - "ãģĦ ãģ¾ãģĹãģŁ", - "ĠG y", - "ĠHis pan", - "Ġdef ence", - "ĠCamp bell", - "m atic", - "Ġrele v", - "Ġpe ach", - "Ħ¸ ìļĶ", - "Ġparad ise", - "Ġcere mon", - "Ġannoy ed", - "æĮ ĩ", - "la x", - "Ġexplo it", - "Ġcla use", - "ek er", - "ĠBlo om", - "n ant", - "ate urs", - "Ġhe ights", - "E ven", - "Ñģ он", - "Ġoutra ge", - "ĠVietnam ese", - "ãģ¯ ãģ¯", - "T R", - "Ġe er", - "Ġcann on", - "ĠCom b", - "IJë §Į", - "è» Ĭ", - "Ġê²ĥ ëıĦ", - "Ġaccomplish ments", - "ĠAnalyt ics", - "Ġshap ing", - "re iben", - "Ġb achelor", - "Ġfing ert", - "ack ed", - "Ġpyram id", - "ĠStew art", - "á st", - "Ġsurviv or", - "Ġdu ct", - "Ġdeal ers", - "æ´ »", - "ع Ùħ", - "ли н", - "Ġed e", - "×ķ× ¢", - "ĠÙĥ اÙĨ", - "ĠÏĦ ι", - "Ġcho oses", - "ĠO wn", - "го ÑĤов", - "h ire", - "алÑĮ нÑĭе", - "ĠÐĽ Ñİ", - "Ġо ÑģÑĤав", - "te ch", - "Ġdro it", - "Ġsubject ive", - "en es", - "Ġdiv is", - "ave z", - "Ġmaneu ver", - "à¹Ħ à¸Ķ", - "ade ce", - "ĠEn s", - "ac ial", - "ĠProt ection", - "ĸ ´", - "Ġform ally", - "Ġwy d", - "ingu ém", - "Ġz iem", - "Ġrecru iting", - "×Ļ× ļ", - "n em", - "Ġforb idden", - "ĠB apt", - "×IJ× ł×Ļ", - "Ġsubs et", - "ĠMag az", - "n ement", - "Ġaqu ela", - "rag on", - "Ġcomm ittees", - "Ġéta ient", - "ud i", - "ĠDa wn", - "Ġb ore", - "Ġcompos er", - "ĠwiÄĻ cej", - "ang a", - "Ġdis like", - "ĠD ays", - "åŁ º", - "Ġpar al", - "Ġm ientras", - "Ġheaven s", - "ãģ Ĵ", - "he id", - "Ġtrad ers", - "on ce", - "Ġmasc ara", - "ĠÏĢ Ïģο", - "Ġwhis per", - "ĠMus k", - "éĽ Ĩ", - "ĠFamil ie", - "All ah", - "ĠOl ivia", - "ĠPr os", - "Ġol ika", - "il im", - "Ġrép ond", - "ĠP eters", - "Ġ å¾Ī", - "Ġbit es", - "Ġv ic", - "ĠN Y", - "em ption", - "Ġ4 50", - "Ġvisual s", - "Ġlie u", - "ück en", - "ĠSte el", - "ĠG P", - "w ait", - "Ġnotice able", - "uch a", - "Ġreh abil", - "Ġreject ion", - "ĠÑģлед ÑĥÑİÑī", - "Ġsl ider", - "Ġregard ed", - "Ġgrav it", - "ĠRes erve", - "c ount", - "Ġbre eding", - "Ġlon ge", - "ale b", - "Ġkn ight", - "Ġв ой", - "Ġprés ent", - "Ĥĺ ìļĶ", - "ĠSpec ifically", - "Ġpos es", - "Ġve ure", - "ok ay", - "em as", - "Ġ ãģ§ãģĻ", - "Ġma jÄħ", - "Ġweb inars", - "Ġcann abis", - "Ġdam als", - "ĠNorth west", - "Ġp ada", - "Ġcrowd s", - "Ġfut ures", - "Ġä n", - "Ġciv ilians", - "ĠS achen", - "æ į", - "Ġtr aces", - "Ġ먹 ê³ł", - "Q U", - "é¡ĺ ãģĦ", - "ĠI F", - "an ın", - "ìĤ ´", - "Ġb iblical", - "ĠV ed", - "Ġst oring", - "ÑĢав лÑı", - "æĩī 該", - "Ġn ast", - "Ġd ö", - "ÑĢ оп", - "el ia", - "Ġside ways", - "ĠUnder stand", - "ĠQ ur", - "Ġper pend", - "ĠMill ionen", - "Ġwater melon", - "ĠDiv ine", - "ult ur", - "ab ord", - "Ġsuccess es", - "Ġhom bre", - "Ġcar p", - "Ġsus cept", - "ung kin", - "Ġk ij", - "ul us", - "Ø§Ø ¬", - "Ġnot ch", - "Ġpolynom ial", - "å¹ ²", - "å ©", - "Ġún ico", - "Ġteles cope", - "Ġpolit ique", - "k iem", - "ĠÎŃ Î½Î±", - "Ġaggreg ate", - "ĠGe off", - "Ġtr il", - "ĠG RA", - "Ġsubscri ber", - "im et", - "Ġдол лаÑĢ", - "op ing", - "Ġth erapeut", - "ĠCan cer", - "Ġpar ade", - "Ġir rig", - "âĻª âĻª", - "Ġclear er", - "Ġb og", - "ĠM aur", - "า à¸ĩ", - "ĠShang hai", - "acht e", - "ĠK ol", - "el ujah", - "Ġha v", - "ĠCr ime", - "se k", - "Ġë ¡ľ", - "ien na", - "ĠG or", - "è Ľ", - "ĠпоÑĤ ÑĢ", - "Ġкаж еÑĤÑģÑı", - "ĠL ift", - "ĠS ort", - "ĠP sal", - "Ġp ing", - "ĵ Ŀ", - "ph is", - "ĠF UCK", - "ĠS yn", - "Ġbam boo", - "¬ ìĺģ", - "c uts", - "Ġm mm", - "Ġfunktion iert", - "Ġ _", - "ÃŃ cio", - "St op", - "Ġimag inary", - "Ġnot amment", - "ĠIniti ative", - 
"ãĥ ¥", - "ĠK urt", - "Ġlo osen", - "Ġbus car", - "çģ «", - "Ġz elf", - "Ġpro ps", - "åĽ ī", - "Ġmoet en", - "Ġmill i", - "Ġhall s", - "ĠM atch", - "Ġbrack ets", - "ĠC ou", - "æ¦ Ĥ", - "ĠÐľ аÑĢ", - "IS A", - "Ġcig arette", - "Ġcompet itions", - "ĠM IN", - "Ġbeh ö", - "vo or", - "Ġ ust", - "ĠZ i", - "ĠO cc", - "ul ates", - "Ġball oons", - "Ġpr onto", - "ĠM iy", - "ĠF ile", - "Ġкл аÑģÑģ", - "нÑĥ л", - "Ġcere al", - "Ġincre ment", - "Ġref ined", - "åı¦ å¤ĸ", - "pr ising", - "ĠR F", - "Ġrespect ful", - "Ġlo ot", - "ask et", - "Ġdeix a", - "ing le", - "Ġfuncion a", - "ĠRe vel", - "Ġso ber", - "Ġperform s", - "ĠG entle", - "ãĤ ¨", - "Ġrecip ient", - "ĠHa use", - "Ġë ĥ", - "F rom", - "Ġmin isters", - "Ġpar adox", - "å°±æĺ¯ èªª", - "Ġtast ing", - "Ġ×Ķ× Ĺ", - "Ġre use", - "ĠL ane", - "ĠÑģов еÑĢÑĪ", - "Ġremem bers", - "Ġfemin ist", - "Ġcommit ments", - "Ġproject ed", - "Ġg az", - "iyor uz", - "Ġoblig ations", - "R o", - "z ar", - "Ġch w", - "ĠJ AM", - "ĠbÄĻd Äħ", - "asp berry", - "Ġм еÑģÑĤо", - "ë² ķ", - "Ġreg ulated", - "Ġw icht", - "ĠTre vor", - "Ġsecond ly", - "ĠIh re", - "els h", - "Ġrep orters", - "ÑĤоÑĢ а", - "oy o", - "G I", - "Ġinter connect", - "é IJĺ", - "OS H", - "æŃ ²", - "Ġbr ass", - "Ġign oring", - "ä»Ĭ æĹ¥", - "in fect", - "Ġpro jekt", - "ore t", - "ÏĦα ν", - "ĠÑĤ ип", - "Ġmut ta", - "Ġunbox ing", - "Ħ °", - "å¡ Ĭ", - "Ġadv ised", - "ĠDen ver", - "Ġsevere ly", - "ĠM hm", - "Ġfl ipped", - "Ġp ien", - "Ġkomm un", - "ĠF RE", - "Ġà®ĩ à®°", - "aint ed", - "Ġkn ives", - "Ġhab l", - "Ġgew orden", - "arett es", - "C S", - "Ġмал енÑĮ", - "Ġgal ax", - "Ġnin ete", - "ê±°ë Ĥĺ", - "Ġs is", - "Ġadvis ory", - "Ġdr illing", - "ĠWould n", - "ün f", - "gest ellt", - "ĠHel en", - "Ġ×ŀ× IJ", - "ap olis", - "Ġrze czy", - "Ġter ra", - "Ġhe p", - "Ġalg ún", - "ik k", - "Ġastron om", - "ĠStar bucks", - "k Äħ", - "Ġpat rol", - "Ġì½ Ķ", - "Ġg on", - "Ġ ãĢIJ", - "Ġson st", - "Ġencoun ters", - "Ġret rou", - "Ġshark s", - "Ġd or", - "ĠR ever", - "Ġev apor", - "Ġreserv oir", - "Ġalleg ed", - "ul er", - "Ġver m", - "Ġcommer ce", - "Ġf itted", - "ge m", - "Ġtact ical", - "Ġl ith", - "éīĦ å¡Ķ", - "h ad", - "è® Ĭ", - "Ġcarboh yd", - "Ġlength s", - "ι ο", - "Ġdem ographic", - "R ob", - "ĠS kin", - "cc oli", - "Ġsimpl ified", - "Ġread ily", - "ĠC um", - "ades h", - "ĠD Ã¥", - "us st", - "ig ne", - "et on", - "Ġmen or", - "q i", - "OO M", - "à¸Ń à¸Ļ", - "Ġpsych iat", - "Ġeight y", - "Ġм илли", - "ĠT ob", - "ed o", - "ç¶ ²", - "ĠÄij ến", - "Ġcirc uits", - "ĠLAU GH", - "ic ism", - "em or", - "Ġreg ener", - "eg ree", - "Ġbure auc", - "ĠAl ber", - "ä¹ĭ å¾Į", - "ĠW or", - "å¤ «", - "Ġres in", - "Ġby ÅĤy", - "ĠI G", - "à¯į ,", - "Ġ7 8", - "Ġwe eds", - "ĠMy th", - "9 3", - "æ ¿", - "ĠëĤĺ ìĻĶ", - "é v", - "á ½", - "ö ren", - "ç ar", - "ĠP AUL", - "Ġdisad vant", - "Ġposition ing", - "Ġcock tail", - "Ġagre es", - "n n", - "ĠS ally", - "M s", - "Ġinher ent", - "Ġmonet ary", - "Ġnat ur", - "ĠN h", - "ĠImp ort", - "Ġle ben", - "Ġw i", - "uss y", - "Ġob es", - "Ġwand ering", - "Ġìĭ łë", - "Äħ da", - "etch up", - "Ġdispos al", - "ĠJ A", - "ĠC er", - "z illa", - "Ġvir gin", - "ĠSl ide", - "and el", - "Ġrighteous ness", - "ĠÎ £", - "Ġide ia", - "ä½ł 好", - "иÑĢов аÑĤÑĮ", - "ר ×IJ", - "Com ment", - "Ġpre lim", - "ĠV ale", - "Ġì§Ģë Ĥľ", - "ĠV anc", - "OM AN", - "Ġп Ñĸд", - "Ġy um", - "st re", - "ce m", - "Ġpo cz", - "Ġfrag ment", - "ĠÑģлÑĥÑĩа е", - "Ġunder go", - "ĠH ank", - "ce ks", - "ĠF PS", - "Ġoc ur", - "Ġdeter ior", - "æ³ ¨", - "Ġempres as", - "Pa ul", - "Ġ) ))", - "ĠвÑĢем ени", - "Ġsc old", - "×Ļ× ¢", - "Ġsuspect ed", - "Ġaccess 
ing", - "Ġsubst it", - "Ġhistor ians", - "ä» »", - "Ġдел о", - "Ġsoci ed", - "r one", - "Ġre den", - "Ġext ends", - "epher d", - "Ġbal con", - "ä¸į èµ·", - "ĠSol o", - "Ġpolit ician", - "олÑĮ но", - "Ġirgend w", - "Ġtraum atic", - "Ġrapp er", - "ĠRO BERT", - "Re ally", - "æģ ¯", - "Ġline up", - "AS E", - "Ġcontract or", - "ĠCorpor ation", - "g or", - "ĠTod o", - "ÑģÑĤÑĢ ой", - "F BE", - "Ġnews letter", - "Ġko ÅĦ", - "alt ies", - "ĠпÑĢ иÑĩ", - "ĠHe avy", - "Ġsw ords", - "Ġmanip ulation", - "Ġfun k", - "Ġv Ã¥r", - "ĠTal iban", - "Ġë° ¥", - "Ġac ne", - "ür ü", - "Ġdes wegen", - "ĠD ust", - "Ġsil ic", - "Ġhook s", - "Ġbl ij", - "Ġpet its", - "Ġfil me", - "ĠBere ich", - "ĠSa id", - "Ġimp osed", - "Ġdi ary", - "Ġго ÑĢ", - "ĠG ates", - "Ġal ta", - "å¸ Į", - "Ġch cia", - "ple asant", - "Ġë° Ŀ", - "Ġmoż emy", - "ĠAust ria", - "Ġbro ker", - "Ġsuck ed", - "èĢ ĥ", - "Ġcomp artment", - "Ġcl one", - "Ġ×Ķ× ¢", - "ĠDan ke", - "Ġnoch mal", - "ез д", - "Ġad renal", - "Ġkle inen", - "ãģ¾ ãģĹãĤĩãģĨ", - "Ġsubsequ ently", - "Ġdecent ral", - "Ġgen etics", - "Ġê´ ij", - "Ġmon itors", - "ĠApp lic", - "ĠRep orter", - "w ert", - "Ġwie m", - "ĠMove ment", - "Ġinterview ing", - "Ġhair s", - "Ġpu ò", - "ĠChel sea", - "Ġco her", - "Ġc ot", - "Ġz as", - "Ġpatch es", - "Ġl ah", - "Ñĥн к", - "ĠRe agan", - "ĠMar co", - "c ity", - "Ġdef ender", - "Ġdecor ation", - "ij i", - "Ġl itter", - "Ð ¨", - "Ġj ego", - "RE W", - "ĠP ik", - "ĠHe e", - "ĠI v", - "Ġи де", - "ĠThe ater", - "ĠÑĩаÑģ ÑĤо", - "Ġswe ater", - "Ġhighlight ing", - "Ġa insi", - "Ġdipl omatic", - "ĠNever theless", - "å ³", - "AS ON", - "Ġpúblic o", - "Ġf erm", - "reat ed", - "c od", - "Ġë¬ ¼ë", - "Ġm ister", - "ĠVanc ouver", - "Ġrecogn izes", - "ec d", - "Ġcomplic ations", - "en cial", - "ãģĹ ãģı", - "Ġê°Ģ ì§Ģ", - "ĠUlt imate", - "Ġva ig", - "ĠM erry", - "×ķ× Ĵ", - "ĠMar cus", - "ç¸ ½", - "ow ego", - "Ġm ente", - "S m", - "Ġa ja", - "ĠTa o", - "Ġjud icial", - "Ġentrepreneurs hip", - "Ġнем ного", - "Ġp is", - "Ġer g", - "Ġch rist", - "ĠC urt", - "ĠÑĢаÑģ п", - "λ ε", - "ens ch", - "ÃŃ re", - "Ġfo cal", - "ĠDiam ond", - "av ÃŃa", - "Ġh anno", - "ĠSqu ad", - "Ġassoci ations", - "ĠCreat ive", - "Ġmess enger", - "Ġbe gging", - "Ġdec imal", - "Ġd Ä±ÅŁ", - "Ġmet adata", - "sel s", - "ĠÄ° ÅŁ", - "ữ a", - "Ġdiffic ile", - "d ı", - "Ġs laughter", - "ĠVer g", - "Ġ×Ĵ ×Ŀ", - "ç° ¡", - "æĮ ī", - "ĠTe a", - "ass es", - "O k", - "Ġsynth es", - "ot iation", - "Ġpain ter", - "Ġel bows", - "Ġarchitect ural", - "ĠÑĢ ад", - "Ġgl or", - "im age", - "amp a", - "cul iar", - "ł ¨", - "Ġte ve", - "ĠSt elle", - "ĠB am", - "Ġì´ Ī", - "as is", - "ip edia", - "ĠG I", - "ĠAct ive", - "çĦ¶ åIJİ", - "az i", - "ãĤĮ ãģ¦", - "ĠL ucky", - "íķ ©", - "ĠпÑĢ иÑħод", - "Ġrun way", - "Ġauthent ication", - "Ġpos ible", - "Ġsupp lements", - "Ġsurg ical", - "G en", - "Ġfeas ible", - "D O", - "Ġout look", - "Ġinter vals", - "Ġan ecd", - "Ãł ng", - "Ġstra ps", - "ĠSh u", - "ud d", - "iss enschaft", - "Ġport e", - "Ġcomm itting", - "Ġall ey", - "Ġco venant", - "ĠPed ro", - "less ness", - "ĠSol id", - "ĠM olly", - "Ġн екоÑĤоÑĢ", - "Ġcooper ate", - "åĮ Ĺ", - "oll en", - "Ġtun a", - "Ġkinderg arten", - "ĠS iz", - "Ġduż o", - "ĠM BA", - "ĠGEOR GE", - "ĠF isher", - "å¿ ĺ", - "ĠCa esar", - "ĠкÑĢаÑģ ив", - "ĠDel hi", - "zy m", - "Ġexpl icar", - "ê°Ģ ì§Ģ", - "un s", - "gr ow", - "ĠпÑĢ иÑģ", - "Ġ8 6", - "Ġst ating", - "Ġmass a", - "ch ter", - "Ġì»¬ë Ł¬", - "Ġdep uty", - "S M", - "n oc", - "Ġge ography", - "ĠEnter prise", - "ĠC ant", - "ö z", - "Ġun pack", - "ĠíĻ Ķë", - "Ġsearch es", - "Ġpres idency", - "Ġtri vial", - "Ġp 
ige", - "ou bt", - "ãĤ ļ", - "ì¼ ĢìĿ´", - "Ġbudget s", - "Ġu b", - "Ġp ne", - "ĠY ale", - "ĠÅŁ öyle", - "reg ular", - "Ġimper fect", - "AR A", - "Ġfam ÃŃlia", - "ur m", - "ĠAdvent ure", - "ãĥ Ĭ", - "c is", - "em ark", - "Ġne go", - "Ġinappropri ate", - "ĠпÑĢи з", - "ĠÑĢ ол", - "Ġdream ed", - "B ry", - "Ġshut tle", - "Ġpill ars", - "Ġb ik", - "in um", - "ĠÑĥ Ñģ", - "ĠNe br", - "Ġperpend icular", - "Ġbook ed", - "ber y", - "Ġv ikt", - "be ar", - "es us", - "Ġвозм ожно", - "¨ ¹", - "Ġpresum ably", - "ĠMem phis", - "Ġambul ance", - "×ķ× ŀר", - "Ġthumbna il", - "Ġmod ification", - "éĩ ı", - "Ġinterpret ed", - "Ġprom o", - "Ġκ ά", - "Ġε ÏĢ", - "Ġacoust ic", - "ĠD B", - "åĵ İ", - "Ġnon etheless", - "ou le", - "Ġpe qu", - "Ġkn ob", - "ãĤ £", - "ĠëıĮ ìķĦ", - "Ġpurch ases", - "ĠÃĩ ünkü", - "Ġdivid ing", - "per form", - "ract ion", - "health y", - "ĠTit le", - "Ġu k", - "Ġcer ca", - "Ġargu ably", - "Ġf ale", - "ë³ µ", - "Ġgam ers", - "Ġutil izing", - "Ġoff ended", - "Ġt ava", - "al ı", - "Ġmed ian", - "Ġinfect ious", - "ĠAn nie", - "Ġsmart phones", - "Ġpar ole", - "åĸ Ŀ", - "ĠEp ic", - "z za", - "Ġun ified", - "Ġê·¸ë ķĮ", - "Ġcur tain", - "ĠÄ ĥ", - "Ġsex ually", - "Ġuns erem", - "ĠCon vention", - "Ġalleg edly", - "Y a", - "ĠH oo", - "en ment", - "æĢ ª", - "íĽ Ħ", - "Ġgig antic", - "Ġnot ing", - "Ġre bo", - "ĠJ ama", - "ĠAl z", - "Ġborrow ed", - "ì¹ ¨", - "Ġper ipher", - "оÑĤ а", - "ĠG B", - "ĠGe ar", - "Ġeconom ically", - "Ġtele fon", - "Ġqu eremos", - "ĠдалÑĮ ÑĪе", - "Ġr as", - "ĠTe ach", - "ic ios", - "at os", - "Ġpl edge", - "b au", - "ĠHim self", - "L ink", - "Ġesper o", - "Ġchrom os", - "ĠP ER", - "Ġer le", - "Ġpod ium", - "ç os", - "Ġnie u", - "Ġf en", - "ĠGO D", - "ĠCh ocolate", - "wer k", - "Ġt ừ", - "Ġsupp ress", - "λ η", - "Ġ24 0", - "Ġsit ä", - "Ġhonest y", - "ĠB io", - "ĠB ard", - "ĠобÑī ем", - "Ġм Ñĥз", - "Ġmar ble", - "ĠÑĨ енÑĤ", - "Ġproc ure", - "Ġrot or", - "ber n", - "Ġtu h", - "Ġhead set", - "at em", - "Ġwarrant y", - "à® ´", - "Ġfil ing", - "ι ά", - "Ġcomp rendre", - "Ġimp ulse", - "Ġsal v", - "wr itten", - "Ġinstit ute", - "K im", - "ĠLGBT Q", - "fic iente", - "H is", - "ĠαÏħÏĦ ÏĮ", - "Ġteen age", - "or us", - "ĠÑĢаз б", - "S ee", - "ĠCons erv", - "á»ģ n", - "ful ness", - "Ġstraw berries", - "ĠAb u", - "и он", - "Ġo lla", - "NO ISE", - "ĠEm ploy", - "Ġwip ed", - "ur ger", - "Ġmod ifications", - "Ġíķĺ ì§Ģ", - "Ġfoot steps", - "Ġhon ors", - "Ġad ul", - "Ġfl ipping", - "ĠH U", - "Z Y", - "Ġintegr ating", - "ب ر", - "ull a", - "Ġnatuur lijk", - "ĠíĹ Ī", - "ĠEth ereum", - "ÙĬ ÙĦ", - "w ed", - "Ġpe aks", - "ĠK es", - "Ġblo om", - "Ġcr ashing", - "Ġ9 11", - "ĠоÑĤ лиÑĩ", - "Ġcontro llers", - "ĠD od", - "Ġвм еÑģÑĤе", - "Ġsort ir", - "å¥ ĩ", - "ĠStra ight", - "ĠGrac ias", - "Ġgro ove", - "Ġto gg", - "Ġìĭ¶ ìĿĢ", - "é ro", - "Ġout ward", - "ĠW A", - "ĠRock y", - "Ġsc am", - "Ġhay at", - "ig nty", - "â Ħ", - "pl ings", - "Ġantibiot ics", - "Ġ ä¸Ģ", - "Ġnever theless", - "j ang", - "com merce", - "Ġspo iler", - "Ġglo ve", - "Ġch atter", - "ĠB Y", - "~ ?", - "Ġíĺ ¸", - "Ġdem ol", - "we chsel", - "im ir", - "Ġra id", - "еÑĢ Ñħ", - "ìŀIJ 기", - "en f", - "Ġcomment ed", - "Ġoptim ized", - "Ġconv icted", - "Ġb ats", - "ĠS B", - "ĠA ur", - "ĠT ong", - "Ġimplic it", - "ĠJan et", - "Ġre ag", - "ãģ ²", - "ĠAdv anced", - "Ġimp ose", - "ש ×Ķ", - "Ġschem es", - "oug her", - "ab olic", - "Ġê±° ì£ł", - "Ġslow ing", - "Ġwt edy", - "Ġdest ructive", - "Ġоп ÑĢед", - "Ġland mark", - "Ġëı Ī", - "ĠWalk ing", - "Ạ¹", - "Ġt ijd", - "ĠK N", - "ĠQu ant", - "ìĺ ¤ë", - "Ġк ÑĢÑĥ", - "Ġper der", - "Ġno ve", - "änd e", - 
"Ġãģ Ĺ", - "b ia", - "Ġcust ody", - "Ġb iod", - "æĿ± 西", - "Ġdirect ing", - "... âĢĭ", - "Ġre loc", - "Ġdemand e", - "ãĤĵ ãģł", - "Ġo ÄŁlum", - "Ġод на", - "ĠMil k", - "åı ·", - "ĠK ra", - "ĠH onda", - "Ġp ue", - "Ġele kt", - "Ġbegin ners", - "Ġspe ar", - "ÃŃ nh", - "ĠLu ft", - "Ġn ig", - "ĠSchool s", - "Ġfor ums", - "ĠQ in", - "pp o", - "Ġz ag", - "ĠÐ ®", - "Ġtooth p", - "ĠSt yle", - "ì´ Ī", - "Ġpun ct", - "Ġrep s", - "ĠA ly", - "Ġamend ments", - "Ġö z", - "Ġdig its", - "ur ai", - "Ġcha otic", - "ĠMas ters", - "e on", - "ĠC ash", - "ĠC uz", - "Ġbede utet", - "Ġscan ning", - "Ġж д", - "н еÑĤ", - "Ġcertain ty", - "j ek", - "Ġdi jo", - "ĠCl imate", - "Ġr inse", - "Ġk rij", - "vel and", - "Ġsound track", - "ĠSa fe", - "ĠNo va", - "9 4", - "Ġa the", - "ĠVer b", - "ol er", - "ìĿ´ì £ł", - "Ġv in", - "Ġrespir atory", - "ĠStud y", - "ĠC AM", - "Ġav ocado", - "ĠZ hen", - "Ġlat ency", - "Ġfe athers", - "Ġcont ar", - "Ġв еÑī", - "Ġf ark", - "Ġbl ended", - "Ġexpl oded", - "ĠX X", - "ĠBen im", - "Ġalgu ém", - "isto ire", - "Ġconfident ial", - "Ġm ast", - "Ġì ¿", - "ge h", - "Ġdis respect", - "ĠSystem s", - "Æ° a", - "E d", - "Ġw ys", - "Ġex otic", - "Ġgl owing", - "ù ng", - "oun ge", - "è Ħ", - "ани з", - "Ġpal av", - "ĠSw ord", - "Ġg im", - "ĠC row", - "Ġpot ent", - "b ish", - "Ġab used", - "ĠJ ed", - "Ġg ambling", - "ĠS pect", - "Ġinvestig ators", - "æĻ ļ", - "Ġr att", - "Ġdo b", - "ĠD ES", - "h og", - "ĠоÑĤк ÑĢÑĭ", - "íĮ ħ", - "ĠденÑĮ ги", - "Ġíĺ ¹", - "Ġë¨ ¸ë¦¬", - "Ġsat uration", - "Ġinher ited", - "ĠInnov ation", - "ìĹ Īëįĺ", - "Ġtang ible", - "Ġdep ri", - "h ed", - "Ġпом ог", - "Ġslic ed", - "ॠį", - "Ġth ế", - "Å ¥", - "6 8", - "Ġcor ona", - "Ġgift ed", - "Ġso ir", - "Ġhum ility", - "ĠìĿ´ 걸", - "Ġflaw s", - "ĠпÑĢ акÑĤи", - "Ġk ald", - "wa ż", - "y w", - "ãĤĵ ãģ§ãģĻ", - "ir teen", - "Ġcroch ets", - "¦¬ ê°Ģ", - "ĠìłĦ ìĹIJ", - "Ġdes e", - "æ¥ Ń", - "Ġм аг", - "Ġdz iaÅĤ", - "Ġl ég", - "ch anging", - "Ġlle v", - "ÅĦ sk", - "çĶ »", - "Ġ198 4", - "orn s", - "ĠW elsh", - "Ġpharm aceutical", - "Ġpump ing", - "ĠSh aw", - "p unk", - "Ġva ult", - "Ġkin etic", - "Ġhur ricane", - "ĠInc luding", - "ứ c", - "ĠGrand pa", - "ans hip", - "é¦Ļ 港", - "ĠвÑĭ Ñħод", - "н ож", - "ľ ł", - "ut ta", - "Ġê²ģ ëĭĪëĭ¤", - "Ġb az", - "Ġпо ÑĪ", - "Ġpe culiar", - "zy Äĩ", - "ĠEll ie", - "Ġlearn s", - "ĠKr ishna", - "Ġconse cut", - "Ġemp ath", - "ĠD in", - "Ġtrad ed", - "ĠBor is", - "ugg age", - "oll a", - "Ġназ в", - "Ġetern ity", - "Ġв п", - "è mes", - "Ġgra pp", - "b é", - "ĠпÑĢед ÑģÑĤав", - "ĠF C", - "į ëĭĪëĭ¤", - "e ven", - "ĠNebr aska", - "ortun e", - "Ġk arena", - "ĠAg ent", - "Ġst ing", - "ĠP I", - "Ġmunicip al", - "power ed", - "Ġconse gue", - "ĠMan chester", - "Ġrain y", - "Ġbl i", - "Ġk ost", - "Ġhal ten", - "ĠAh hh", - "ins ula", - "er ting", - "ĠاÙĦ Ùģ", - "Ġrel acion", - "Ġk omen", - "Ġd ome", - "Ġpri ests", - "ĠInt rodu", - "rop he", - "sh ore", - "vel t", - "clip se", - "ĠÑĢ ÑĥÑģ", - "×Ļ× ¡", - "Ġsab emos", - "ĠHoll and", - "og i", - "ank i", - "ĠM ats", - "Ġsm oked", - "ull ie", - "Ġeuro pe", - "ĠдейÑģÑĤв иÑĤелÑĮно", - "Ġbard ziej", - "Ġtransform ing", - "ĠE z", - "op ath", - "Ġìĸ¸ ëĭĪ", - "ĠÑģÑĤ ан", - "ằ ng", - "ั à¹ī", - "ĠO uch", - "Ġclear ance", - "ust ain", - "Ġsolid arity", - "Ġpro ving", - "ĠÐĺ н", - "ĠÑģ ÑĬ", - "Ġpro long", - "ад но", - "Ġs os", - "ĠDe al", - "Ġ17 0", - "m ons", - "Ġз ем", - "Ġlo gged", - "Ġlif elong", - "Ġsens ory", - "Ġbe hold", - "ĠF AR", - "èt ement", - "ĠFed eration", - "Ġdod ge", - "ĠSh ir", - "Ġdrag ons", - "ĠAr ctic", - "Äħ ż", - "Å į", - " º", - "Ġden ke", - "Ġpodr ÃŃa", - "co 
le", - "ÑĥлÑĮÑĤ аÑĤ", - "Ġsystem atic", - "ам а", - "ch os", - "Ġclin ics", - "ĠB S", - "Ġtal es", - "us ions", - "Ġí Ī¬", - "Ġpres ervation", - "Ġl ore", - "ĠProt est", - "á» Ľ", - "å¸ Ĥ", - "Ġacknowled ged", - "ĠIs aiah", - "ĠëķĮ ëĬĶ", - "Ġ× ĺ", - "Ġcompet itor", - "Ġadv ancing", - "z ip", - "Ġtent h", - "ĠLa ure", - "Ġh ints", - "Ġexerc ising", - "ŀ ľë", - "ĠIntell igence", - "u ated", - "OU T", - "op ed", - "Ġaut onomy", - "Ġbrand ing", - "ĠMediter ranean", - "Ñĸ к", - "Ġscrew driver", - "Ġsu pre", - "Ġst ap", - "Ġjurisd iction", - "ĠSetting s", - "Ġfore front", - "ĠF emale", - "com fort", - "Ġmultiplic ation", - "ĠMur ray", - "Ġbo b", - "ĠT as", - "Ġt ahu", - "Ġon un", - "et ter", - "Ġproph ets", - "l ag", - "Ġreven ues", - "Ġpr á", - "Ġupload ing", - "Ġmach inery", - "asc al", - "ĠEst á", - "ĠG oth", - "ĠB ald", - "ĠS aw", - "Ġstri pes", - "ìł ij", - "Ġpow in", - "æĹ¥ æľ¬", - "Ġhost ile", - "Ġdar um", - "Ġprevent ed", - "ожалÑĥй ÑģÑĤа", - "Ġalgun as", - "Ġhop eless", - "Ġz naj", - "Ġread ings", - "Ġcra ving", - "t at", - "ĠP ig", - "Ġli ar", - "çĪ ±", - "Ġmulti player", - "Ġd ale", - "ĠCour se", - "íģ ¼", - "ĠK ita", - "Ġcustom s", - "Ġrespond s", - "end ra", - "è¦ ĸ", - "Ġmet ro", - "Ñģ ол", - "Ġmitig ate", - "Ġopp ression", - "Ġ æĪijåĢij", - "qu inho", - "Ġam mo", - "Ġen fer", - "Ġp ony", - "Ġ ounces", - "° Ķ", - "ĠìĪĺ ê°Ģ", - "Ġdich o", - "ĠDe b", - "Ġwond ers", - "ĠRo ose", - "Ġpri zes", - "ĠA LEX", - "Ġthank fully", - "Ġtiss ues", - "ĠÑĢав но", - "ĠL una", - "intell igible", - "ĠìĻ ¸", - "ê° ij", - "ĠHe at", - "ĠÑģ ид", - "ĠQu i", - "Ġ ions", - "Ġaccommod ation", - "ä¾ ¿", - "ĠK art", - "ien st", - "Ġt arde", - "Ġso aked", - "ĠCase y", - "Ġì´ Ŀ", - "ĠÑĢ Ñĥб", - "Ġdifferent i", - "Ġleft over", - "Ġexch anges", - "sec ond", - "Ġfirst ly", - "Ġbuild er", - "ri en", - "Ġd w", - "Ġboun cing", - "? 
<", - "olog ÃŃa", - "we alth", - "Ġmed itate", - "ĵ¤ ìĿĺ", - "ĠC raft", - "è§ī å¾Ĺ", - "æĻ ®", - "ri v", - "ĠAgain st", - "Ġcer amic", - "esp ère", - "Ġcompet ent", - "ĠHop kins", - "Ġkil os", - "Ġgra vel", - "Ġpist on", - "Ġfriends hips", - "Ġesc re", - "Ġvo z", - "ĠGes ellschaft", - "Ġunter stüt", - "Ġmu j", - "Ġwarning s", - "p os", - "ĠProfess ional", - "w szy", - "od le", - "b ands", - "Ġteam work", - "stell ung", - "Ġd x", - "åį Ĭ", - "Ġatt orneys", - "Ġweit ere", - "ãħĭãħĭ ãħĭ", - "ĠOrig inal", - "×Ļ× Ĺ", - "Ġbroadcast ing", - "ĠпеÑĢв Ñĭй", - "uch i", - "Ġhe ure", - "Ġgra bs", - "ĠW OR", - "ĠPla id", - "M in", - "Ġp az", - "ĠP uis", - "um u", - "it ates", - "Ġco ats", - "Ġbu en", - "Ġhe ir", - "Ġpne um", - "ש ר", - "ens er", - "ĠJUD GE", - "Ġbl onde", - "á¹ Ľ", - "Ġg ak", - "Ġs ık", - "Ġquot ed", - "Ġequip o", - "Ġw ishing", - "ÃŃ cia", - "Ġver bs", - "çµ Ħ", - "ĠCanad ians", - "Ġgover ning", - "ĠEv ans", - "E uro", - "Ġgen res", - "Ġunters chied", - "ĠBeck y", - "³¼ ê²ĮìļĶ", - "Ġe inge", - "ĠRa ise", - "ol and", - "ĠStr ateg", - "Ġer es", - "ĠVeter ans", - "Ġbreak out", - "Ġsant é", - "Ġad el", - "Ġinvestig ated", - "Ġpe ur", - "Ġag ile", - "Ġrail road", - "ans ka", - "Ġе й", - "Ġexp os", - "ator ies", - "ĠCont ent", - "Ġtruth s", - "ĠTra il", - "Ġgu a", - "Ġp ores", - "Ġwrit ings", - "ĠU hr", - "ĠThat s", - "Ġic ing", - "O C", - "ĠProdu ction", - "Ġcar ne", - "IS S", - "Ġn inguém", - "n on", - "Ġv icious", - "×ķ× Ķ", - "Ġrecon nect", - "Ġcent res", - "ĠK em", - "Ġcre ase", - "ĠìĿ´ë ¯¸", - "айÑĤ еÑģÑĮ", - "Ġб оÑĢ", - "ĠHay ır", - "ĠÑģ Ñĥд", - "Ġún ica", - "owa ÅĤ", - "Ġad her", - "h ua", - "Z Z", - "Ġprecis o", - "Ġcurrent s", - "Ġseason ed", - "ĠIo T", - "ĠB ishop", - "è¨ Ī", - "st ed", - "ĠBern ard", - "ì¤ ĺ", - "æ² »", - "ĠGl enn", - "Ġktóry m", - "ื à¹Ī", - "Ġast rolog", - "ĠK ot", - "å¤ ľ", - "Ġparf ois", - "Ġfor wards", - "ĠW iÄĻ", - "ĠÎ ĺ", - "Ġn ano", - "è» į", - "s ub", - "ĠBr ill", - "Ġgr it", - "Ġc ited", - "g ado", - "Ġmel ts", - "Ġfor cé", - "âĸĪ âĸĪ", - "Ġb ajo", - "Ġdiscret ion", - "° °", - "at ivity", - "Ġsitu ated", - "ãĥ« ãĤ¯", - "Ñīе е", - "åľ° æĸ¹", - "ĠпÑĢин ÑĨип", - "am az", - "Ġaqu arium", - "Ġdissol ve", - "ĠGod s", - "S uper", - "Ġam id", - "z k", - "Ġ ãģĦ", - "éł IJ", - "amp f", - "Ġhel a", - "' !", - "Ġdevelopment al", - "ĠD ise", - "ĠÑĢабоÑĤ аеÑĤ", - "Ġsnaps hot", - "好 好", - "Õ ¸", - "ĠY ue", - "ĠH ulk", - "ĠDo om", - "ĠFel ix", - "Ġré f", - "M ale", - "ç· Ĭ", - "ph ants", - "EN S", - "ĠMe chan", - "ĠG olf", - "åĨį è¦ĭ", - "Ġgener osity", - "ät ze", - "Ġunlock ed", - "Ġ ãĤĴ", - "íĥ ģ", - "ocaly pse", - "Al right", - "Ġê° ľë", - "Ġ×IJ× ij׾", - "ĠKeep ing", - "Ġcollabor ating", - "ch ief", - "ĠFern ando", - "Ġchef s", - "ĠíĶ¼ë ¶Ģ", - "Ġsk ipped", - "Ġperson n", - "Ġax e", - "che z", - "Ġextract ion", - "ĠA V", - "ĠGib bs", - "Ġí ľ", - "Ġs ı", - "I AM", - "V iew", - "ĠGR ANT", - "Ġëª ¸", - "Ġver ification", - "Ġdep icted", - "ĠMo z", - "ou x", - "Ġt ul", - "Ġsc anner", - "Ġcomed ian", - "ĠVol ks", - "ĠJE FF", - "è¨Ĥ éĸ±", - "§ Ħ", - "Ġdistract ion", - "r á", - "ĠIN TER", - "Ġsin cer", - "Ġ×ŀ× ª", - "Ġש ׳", - "Ġconstruct ive", - "ar f", - "ĠëĪ Ħë", - "Ġe co", - "r amos", - "Ġrenew ed", - "in ement", - "ĠU b", - "ĠPe pper", - "ì§Ģ ê°Ģ", - "ĠDar win", - "Ġmerch and", - "Ġv árias", - "è ce", - "N G", - "ĠìľĦ íķ´ìĦľ", - "Ġак ÑĤив", - "ĠUn ters", - "ع ÙĦ", - "Ġint ric", - "omm a", - "ie ving", - "ĠCarol ine", - "åĵ ģ", - "ĠPR ES", - "Ġperform er", - "Ġaut our", - "ãģ¾ãģĽ ãĤĵ", - "Ġutter ly", - "Ġsynth esis", - "Ġles bian", - "Ġretrie ve", - "Ġmane ira", - "Ġimp 
air", - "Ġment oring", - "ĠSoul s", - "ĠGo Pro", - "ÑĢ аÑĤÑĮ", - "Ġc ose", - "ĠSS D", - "I RE", - "Ġup front", - "ĠA un", - "Ġgam er", - "Ġl itt", - "Ġag gression", - "ĠLike wise", - "ĠBet ty", - "ĠD art", - "ĠD LC", - "ish ment", - "ìŀ¥ ìĿĦ", - "Ġ 对", - "ç» ı", - "c ream", - "ĠBaby lon", - "Ġn ug", - "br ar", - "Ġa ynı", - "am ily", - "b ike", - "ahah aha", - "lo yd", - "Ġmir a", - "Ġper me", - "ĠG aming", - "Ġfirm ware", - "M a", - "Ġassist ed", - "at ics", - "Ġìķŀ ìľ¼ë¡ľ", - "ĠM ental", - "niej s", - "ĠI z", - "ow Äħ", - "Ġt ougher", - "Ġde ed", - "èĭ ¦", - "Ġsty lish", - "ĠTool s", - "ĠH amp", - "Ġsun screen", - "Ġartic ulate", - "i ye", - "и ÑĦ", - "ĠSp read", - "ĠHA VE", - "Ġsw irl", - "Ġspons oring", - "ä» ĭ", - "iov ascular", - "mes i", - "Ġrelax ation", - "ĠÑģво иÑħ", - "Ġmar gins", - "Ġsa ÄŁ", - "ĠPr ide", - "ĠÏĦοÏħ ÏĤ", - "и ÑĨи", - "en ci", - "Do es", - "Ġcor pse", - "Ġend urance", - "Ġí ŀĺ", - "ì¹ ´", - "Ġhair cut", - "Ġinterrupt ed", - "Ġwind y", - "ĠC aleb", - "Ïģ Ïĩ", - "ĠPour quoi", - "Ġhol istic", - "uc lear", - "ĠWho le", - "å£ «", - "A ct", - "Ġgall on", - "c ade", - "ĠReg ional", - "ro ads", - "ĠSch ne", - "á ng", - "Ġиз мен", - "ãĤĪ ãģŃ", - "Ġmen us", - "Ġspl itting", - "Ġpr iced", - "ĠÎ ĵ", - "Ġus ername", - "ĠÐŀ Ñĩ", - "Ġcomp ressed", - "y in", - "Ġguard ian", - "Ġgo of", - "Ġcheck list", - "Ġinter change", - "Ġexped ition", - "Ġex tern", - "Ġinfra red", - "eng o", - "Ġden ying", - "Ġpack ets", - "on ent", - "B B", - "ĠInc re", - "Ġsin i", - "ÃŁ er", - "è g", - "ma al", - "gen eration", - "Ġminor ities", - "Ġlle var", - "Ġnom ination", - "Ġcons id", - "Ġ×ľ× ¢", - "m uÅŁ", - "ĠEs c", - "Ġnumer ator", - "Ġka ik", - "Ġktóry ch", - "ies en", - "Ġv ê", - "ĠUS S", - "ĠPri vate", - "Ġод но", - "Ġal ém", - "ÃŃt ulo", - "Ġlim b", - "Ġforg iven", - "Ġdiscl osure", - "ÏĦ ί", - "Ġning ún", - "Ġtherapeut ic", - "Ġnegoti ating", - "ĠN ike", - "ense ful", - "Ġin cap", - "Ġflag ship", - "t own", - "â Ī", - "ĠÏĢ ολ", - "Ġwol ves", - "Ġviol ations", - "ĠAr nold", - "Ġinterven e", - "Ġhe ater", - "Ġrecurs os", - "Ġma id", - "ê² ¼", - "Ġдав айÑĤе", - "ĠCe lebr", - "Ġca pe", - "ĠSt y", - "ain en", - "s ite", - "b ij", - "Ġп олÑĮз", - "Ġfr amed", - "Ġpublish ers", - "ĠÑĩ ÑĥÑĤÑĮ", - "Ġtempt ation", - "Ġcert eza", - "Ġex empt", - "ìĬ ¹", - "se lling", - "ĠT ask", - "ho on", - "ĠC oc", - "ĠPark s", - "Ġrepet ition", - "ĠÑĤ Ñĥда", - "Ġens l", - "ĠdeÄŁ iÅŁ", - "ĠOr lando", - "ĠMain ten", - "æŃ ¢", - "oc ument", - "ĠH C", - "Ġscoot er", - "Ġнап иÑģ", - "Ġtight er", - "Ġte ase", - "Ġremo ves", - "Ġkij ken", - "ĠÑģÑĥ ÑīеÑģÑĤв", - "Ġth é", - "ĠвÑĭ глÑıд", - "Ġrel ieve", - "Ġmit ä", - "Ġstation ary", - "ö ff", - "p able", - "Ġar ter", - "Ġdé f", - "r ative", - "Ġcon ect", - "Ġsad dle", - "ĠD iane", - "Ġcomm emor", - "fend im", - "S ÃŃ", - "Ġíģ ´ë", - "Ġman ge", - "at te", - "Ġarrog ant", - "Ġrobot ic", - "Ġgi Ãł", - "æĺ¯ çļĦ", - "Ġneighbour hood", - "iss on", - "Ġдв иж", - "ĠR I", - "ĠNorm an", - "b rand", - "am ation", - "Ġraz or", - "Ġmur ders", - "ĠÑĤ Ñĥ", - "Ġwszystk im", - "Ġut ilities", - "Ġmicros cop", - "ê ¿", - "Ġda qui", - "oll ar", - "ĠÐĶав айÑĤе", - "Ġann ée", - "Ġkilomet res", - "Ġhom osexual", - "Ġarchitect s", - "ãģ¡ ãģ¯", - "Ġni ye", - "L ER", - "Ġmicro phones", - "ĠSt unden", - "Ġconsecut ive", - "iend a", - "v änd", - "D ER", - "Ġlif ts", - "ĠMe at", - "Ġsave z", - "íĸ Īëįĺ", - "M en", - "Ġdism ant", - "ê±°ë ¥¼", - "Ġins ulation", - "Ġsc all", - "Ġsp ooky", - "Ġpar c", - "Ġball et", - "ĠWhats App", - "Ġfr anc", - "Ġdeliber ate", - "Ġíħ Į", - "Ġm ars", - "ĠZ ur", - "P r", - "dis 
ciplinary", - "Ġobs ession", - "м е", - "Ġmarch ing", - "ĠEmer gency", - "ig uous", - "Ġs zy", - "ĠL ands", - "Ġboard ing", - "ĠпоÑĩ ÑĤи", - "Ġenv y", - "Ġcompassion ate", - "Ġmer ci", - "Ġdes irable", - "d ale", - "Ġcan ım", - "ĠAnt ar", - "tem ps", - "Ġconfig ured", - "ĠComp ared", - "ne h", - "ic ating", - "Ġnic kel", - "ÙĪ ÙĤ", - "Ùĥ ÙĪÙĨ", - "op es", - "Ġform ulas", - "ĠÐķ ÑģÑĤÑĮ", - "Ġpo bl", - "ĠP J", - "ĠL ud", - "ä»Ĭ åĽŀ", - "ĠBr id", - "ĠH og", - "ĠBr is", - "J en", - "Ġshad ing", - "ĠY as", - "Ġdistur bed", - "Ġrecomm ending", - "Ġc é", - "ĠH OW", - "ìĹĪ ìĸ´", - "Ġrevers ed", - "ĠInteresting ly", - "iox id", - "åħ Ń", - "Ġìĺ¤ ì¼ĢìĿ´", - "ế u", - "x x", - "Ġou ais", - "ĠYouT ubers", - "ĠR osa", - "ĠH aupt", - "j adi", - "Ġvlog s", - "Ġcult ura", - "ĠLeaders hip", - "ĠH ep", - "Ġill um", - "´ë ıĻ", - "Ġcustom ized", - "Ġmar ca", - "Ġqu atro", - "Ġн аг", - "ĠSpace X", - "ĠE igen", - "ast ing", - "ĠolduÄŁ u", - "Ġfor ts", - "ãģ ī", - "r iment", - "ien cia", - "Ġten ir", - "ro ffen", - "Ġ197 9", - "Ġc ie", - "ĠëIJĺ ê³ł", - "Ġes cri", - "ÏĮ ÏĤ", - "íı ¬", - "uz zy", - "C ong", - "ìĿ¸ ìĿ´", - "G reat", - "s il", - "é ch", - "ãģ¨ ãģĭ", - "Ġmult ic", - "ĠDis k", - "² ķ", - "Ġfaz la", - "Ġle vant", - "Ġab ajo", - "ur ry", - "st ru", - "Ġ먹 ëĬĶ", - "Ġaccess ory", - "Ġдв иг", - "ĠR id", - "20 19", - "Ġdown stream", - "æķ ¸", - "Ġk az", - "ut an", - "Ġchar coal", - "Ġa fect", - "w u", - "Ġcontext s", - "Ġfe ared", - "ĠìĦ ¤", - "Ġhist ories", - "Ġf as", - "ens ible", - "Ġcoco a", - "ill ar", - "ge ons", - "Ġspiritual ity", - "ĠP ew", - "Ġpharm acy", - "Ġpass ions", - "Ġb os", - "Ġall á", - "Ġthri ving", - "ĠRe act", - "Ġoccup y", - "Ġwithdraw al", - "Ġallow ance", - "ĠFra ktion", - "Ġbud dies", - "Ġid le", - "Ġdissol ved", - "Ġpreval ent", - "Ġmil itar", - "Ġsens ing", - "Ġpo jaw", - "Ġanc ora", - "Ġabund ant", - "Ġha irst", - "ãģĤ ãĤĮ", - "Ġtw ee", - "Ġnäch ste", - "ĠMöglich keit", - "Ġho o", - "uff icient", - "Ġfant ast", - "Ġed ible", - "Ġëĸ¨ ìĸ´ì", - "ìĽ ĥ", - "Ġve in", - "uc ci", - "Ġdevot ion", - "Ġconce aler", - "in come", - "Ġrecy cled", - "ĠìĬ¤í ĥĢ", - "Ġpont os", - "Ġdess us", - "Ġvé rit", - "Ġreflect ions", - "ĠA A", - "Ġtake away", - "b are", - "ĠCont act", - "e il", - "ĠHe ar", - "Ġmir ac", - "ĠGer ilim", - "ĠÑģам Ñĭй", - "Ġv ivo", - "Ġkilogram s", - "ĠCr im", - "û t", - "7 8", - "Ġsincere ly", - "ra z", - "Ġë³ µ", - "Ġarri v", - "Ġconcept ion", - "ĠPers ian", - "Ġsj äl", - "Ġst arring", - "ĠìķĦë ¬´", - "ĠFore ver", - "е ÑģÑĤÑĮ", - "Ġve il", - "Ġsubt it", - "od ka", - "ĠоÑĤно ÑĪ", - "Ġcook s", - "ен Ñı", - "K ay", - "Ġni ños", - "ĠPh one", - "Ġstitch ing", - "Ġfinger print", - "é¢ ĺ", - "λ ά", - "Ġded icate", - "ĠL ob", - "Ġblack s", - "ĠB le", - "b out", - "ĠÄij ang", - "Ġe ks", - "Ġsqu ash", - "ĠK ü", - "od i", - "Ġn Æ°á»Ľc", - "Ġvoy age", - "Ġplay ful", - "ĠØ¥ ÙĦÙī", - "an ic", - "Ġcondem n", - "ĠB öyle", - "ĠPol ize", - "ãĤ¿ ãĥ¼", - "Ġay uda", - "Ġp am", - "à¹Ħ à¸Ľ", - "ĠK athy", - "ед ин", - "нов а", - "Ġbr ig", - "eg er", - "Ġe agle", - "Ġvis ions", - "ĠíķŃ ìĥģ", - "Ġsh itty", - "Ġh ott", - "ĠBr itt", - "ut ors", - "ENT E", - "æĽ ²", - "Ġph on", - "ĠB ing", - "Ġпод деÑĢж", - "spr ing", - "æĸ ¯", - "et ten", - "Ġpil gr", - "Ġed iyor", - "енÑĤ Ñĭ", - "ag gio", - "Ġj ul", - "Ġcomp rend", - "te il", - "ĠØ ²", - "Ġperform ers", - "Ġinf amous", - "ĠM K", - "ç ª", - "æ³ ģ", - "ot le", - "e ff", - "ĠH ash", - "Ġcow ard", - "ĠB RA", - "ĠD D", - "Ġcom ida", - "Ġpl ata", - "Ġfl ap", - "ĠMe hr", - "rib ution", - "ĠY emen", - "Ġmyster ies", - "ĠÄ° yi", - "Ġst ell", - "Ġeyel iner", - 
"Ġdel es", - "Ġnail ed", - "Ġillness es", - "Ġst acks", - "Ġtrabaj ar", - "fl ower", - "ci u", - "Ġcr ude", - "Ġsubstant ially", - "Ġhome m", - "Ġnep hew", - "Ġstamp s", - "Ġcar bs", - "ÑĮ ÑĤе", - "mo oth", - "Ġtun nels", - "ac ie", - "æ³ ¢", - "ĠSe ñ", - "ĠH era", - "ĠìķĦëĭĪ ìĹIJìļĶ", - "ĠWy oming", - "ĠHD MI", - "ĠL is", - "u ción", - "Ġste er", - "о Ñİ", - "иÑĤ а", - "N T", - "Ġìĸ¼êµ ´", - "Ġpal ms", - "Ġne on", - "ов аниÑı", - "Ġfilter ing", - "Ġjou er", - "ĠH ö", - "Ġне Ñģ", - "ê²ł ìĸ´ìļĶ", - "Ġ8 1", - "Ġstory line", - "Ġprz ep", - "Ġthank ing", - "ĠBo eing", - "Ġsoft ly", - "j em", - "алÑĮ нÑĭÑħ", - "Ġflash light", - "Ġп Ñĥ", - "ĠW OMAN", - "ắ c", - "ÃŃ ch", - "Ġlux urious", - "Ġw ün", - "Ġimpact ful", - "Ġcons on", - "re u", - "ir ring", - "if ter", - "Ġconstitu ents", - "èIJ ½", - "Ġ9 4", - "ĠT ou", - "g om", - "ĠìĥĿê°ģ ìĿĦ", - "Ġstere otypes", - "Ġmoż li", - "åĪĨ 享", - "Ĥ ¨", - "Ġpencil s", - "ĠÑģл ож", - "Ġih rem", - "ĠBes ch", - "ĠK oh", - "ĠEnt scheid", - "Ġle k", - "Ġför s", - "Ġtotal mente", - "Ġlive ly", - "Ġent ropy", - "Ġdisc ern", - "ĠÐĹ Ð½Ð°", - "Ġdo v", - "Ġmyth ology", - "è¨ĺ å¾Ĺ", - "apan ese", - "Ġapprox imate", - "аÑĤ ив", - "if iable", - "ĠSe o", - "åĢ Ĵ", - "´ìĭ¬ íŀĪ", - "Ġìĺ ·", - "Ġtempor al", - "Ġi T", - "Ġest at", - "к им", - "Ġspr ink", - "Ġgr und", - "Ġinfant ry", - "Ġsch affen", - "ç´ Ħ", - "Ġan k", - "ri ages", - "ĠYe on", - "ĠMor oc", - "Ġinv asive", - "ģ Ķ", - "Ġparent ing", - "ĠR is", - "ib ile", - "Ġmod s", - "å½ ¢", - "ĠпÑĢов еÑĢ", - "ĠTh ing", - "ĠWhere ver", - "Ġacknowled ging", - "Ġpa wn", - "um mer", - "or b", - "6 9", - "Ġretr ouve", - "Ġrel ies", - "ĠHigh way", - "Ġa we", - "ãģ§ãģĻ ãģĭ", - "ita ire", - "Ġapplic ant", - "Ġais le", - "w orm", - "Ġpay load", - "Ġcar re", - "ĠB ach", - "æł ¼", - "Ġì¹ľ 구ë", - "ни е", - "Ġit ÃŃs", - "onna ise", - "s ol", - "èı ¯", - "alg ia", - "Ġrock ing", - "Ġbest en", - "rit es", - "^ ^", - "ин ой", - "Ġba ixo", - "Ġ기 ìĸµ", - "оÑĤ ÑĢи", - "s im", - "Ġinc arn", - "ëĭ¤ ìĿĮ", - "Ġl ick", - "s ided", - "Ġ7 1", - "f order", - "Ġreson ance", - "Ġte gen", - "Ġmet aph", - "ows er", - "Ġ×IJ× ł×Ĺ׳×ķ", - "? 
ãĢį", - "Ġsp ielen", - "Ġvoll ey", - "ĶìĿ´íģ¬ ìĹħ", - "lo oked", - "Ġsent enced", - "Ġmultip lying", - "Ġide als", - "Ġwahr scheinlich", - "Ġdepos its", - "bil ir", - "Ġeff et", - "ill on", - "Īë §Į", - "Ġtestim on", - "Ġz awsze", - "ĠпÑĢоÑĨ еÑģÑģ", - "ĠL av", - "ä¸į éĮ¯", - "Ġtrava iller", - "Ġla isse", - "ĠMount ains", - "ĠÑĢ об", - "Ġexam ined", - "it us", - "W as", - "л Ñĭ", - "Ġattrib uted", - "ĠìĬ ¹", - "ĠBar on", - "Ġg ep", - "Ġatt ent", - "ĠColl ection", - "Ġthe at", - "ĠC ai", - "Ġwell s", - "Ġhuman o", - "çĹ ħ", - "ĠH ast", - "ĠÑħоÑĤ Ñı", - "cz as", - "Ġperm its", - "Ġle gg", - "Ġe po", - "ĠF en", - "Ġth i", - "ĠF oi", - "Ġé lect", - "Ġ8 3", - "Ġover th", - "Ġ è¬Ŀè¬Ŀ", - "Ġten ant", - "è² ·", - "N ext", - "Ġpra ised", - "sec urity", - "ĠImp act", - "为 ä»Ģä¹Ī", - "Ġv ouch", - "Ġneg ó", - "Ġun ve", - "Ġcritic ize", - "ĠKen ya", - "Ġtact ic", - "Ġlo gr", - "Ġpo is", - "Ġpap a", - "spe aks", - "ðŁ ij", - "isp ers", - "Ġsur plus", - "Ġcold er", - "åį Ĺ", - "åIJ ¬", - "pl ets", - "ĠV ienna", - "ĠLe ad", - "Ġaer ial", - "ĠT ah", - "енÑĤ ов", - "ĠGree ks", - "C am", - "Ġmá xim", - "Ġk uin", - "ch io", - "Ġdemonst rates", - "an os", - "ĠC ert", - "ĠÑį н", - "Ġblog s", - "ĠìĦľ ìļ¸", - "Ġbe ams", - "ик ов", - "Ġprompt ed", - "Ġfright ening", - "ĠPors che", - "ãģĪ ãģ¦", - "lar ını", - "Ġch illing", - "is phere", - "Ġfl ashing", - "ĠK ard", - "b read", - "Ġex h", - "Ġty cker", - "Ġec ological", - "ĠMa e", - "Ġ×ŀ×IJ ×ķ×ĵ", - "ĠëĤ ĺëıĦ", - "л он", - "ys s", - "Ġper gunt", - "Ġpri x", - "izz ard", - "Ġcan cers", - "Ġ9 1", - "s usp", - "ĠIt em", - "ÅŁ a", - "Ġp est", - "Ġtak Äħ", - "Ġl ymph", - "ĠPat ri", - "f ill", - "Ġrec onna", - "Ġoptim ism", - "Ġmim ic", - "Ġì² ľ", - "ĠMad ame", - "oc y", - "l ining", - "åijĬ 訴", - "erm e", - "Ġfold ers", - "Ġcz ÅĤ", - "uch ar", - "Ġcur so", - "Ġbre ach", - "ни ÑĤÑĮ", - "Ġp amiÄĻ", - "Ġel ig", - "Ġaut op", - "F low", - "Ġprogram med", - "ĠPro cess", - "Ġfig ur", - "ĠS F", - "ĠE les", - "Ġprogram mes", - "Ġdiz zy", - "ìĭľ ê°Ħ", - "Ġли бо", - "Ġsn iff", - "ĠSeb astian", - "ĠH ye", - "Ġ4 000", - "Ġperm ite", - "æ¢ Ŀ", - "Ġза Ñī", - "Ġgu it", - "ĠD ais", - "Ġaccord ance", - "Ġmod ular", - "ogene ous", - "æĭ į", - "Ġpou quinho", - "Ġart illery", - "Ġlub ric", - "Ġvol can", - "ĠN H", - "ðŁ ¤", - "Ġde an", - "R h", - "Ġminist re", - "åĿ IJ", - "ĠIn v", - "ĠBul gar", - "ĠD aten", - "è İ", - "I m", - "Ġorigin ated", - "ĠN ixon", - "inte gr", - "Ġlack s", - "ĠN acht", - "ìĸ´ë Ĥĺ", - "cam era", - "Ġrad ish", - "ki ye", - "Ġang es", - "Ġpré f", - "j uk", - "ĠBe e", - "ĠB U", - "ĠвоÑģ п", - "ĠB T", - "ê mes", - "ĠSt ück", - "ĠIn k", - "æĪĸ èĢħ", - "ĠSerge ant", - "ĠMult ip", - "Ġhiç bir", - "ĠС ам", - "ĠD é", - "ol ph", - "ìĸ ¸", - "Ġimp at", - "ĠìķĬ ê³ł", - "ĠÑĤак ого", - "ĠнавеÑĢ ное", - "Ġunpredict able", - "Ġm end", - "ĠìĹĨ ìĸ´ìļĶ", - "Ġjakie ÅĽ", - "Ġann i", - "Ġdon né", - "ĠK irsty", - "Ġrectang ular", - "Ġempez ar", - "ĠEx change", - "ê° Ķ", - "Ġé conom", - "ãģĵ ãĤĵ", - "el in", - "re ibt", - "Ġ×Ķ× ¤", - "Ġc emetery", - "Ġespañ ol", - "ol in", - "лÑİ Ð´", - "Ġgr âce", - "all en", - "ĠPh ilos", - "ĠEr st", - "Ġìĥ Ī", - "ĠV id", - "G ive", - "O H", - "μ ο", - "ĠP are", - "Ġmetabol ism", - "Ġma ple", - "Ġax le", - "ĠD y", - "Ġkomm e", - "Ïİ Î½", - "Ġgreat ness", - "Ġver ified", - "Ġsp é", - "ĠFahren heit", - "ĠB ren", - "ĠConf eder", - "Ġhist oire", - "Ġelimin ating", - "ĠAd ding", - "ĠAb i", - "æĿ İ", - "Ġhospital ity", - "t im", - "Ġbon ito", - "Ġpart es", - "ĠдÑĢÑĥг иÑħ", - "ĠSh ay", - "ĠS ed", - "Ġreg rets", - "Ñı ми", - "Ġten ants", - "éĢ Ł", - "ĠP TS", - "Ġdev 
i", - "ĠL ate", - "ue z", - "Ġsö yl", - "ãĤ »", - "Ġìŀ¬ë °Į", - "Ġtogg le", - "Ġmas king", - "алÑĮ ного", - "Ġpers ön", - "Ġamer ican", - "f ik", - "ĠR GB", - "ens on", - "ĠK A", - "ww ww", - "ĠÑĢ ег", - "met ics", - "Ġeduc ator", - "ãĤ· ãĥ«ãĤ¯", - "p ark", - "елÑĮ зÑı", - "ar us", - "ÑĢ еÑĤ", - "Ġfe ito", - "Ġcho ir", - "Ġlar go", - "Ġe ens", - "Ġwat ts", - "ĠSing le", - "Ġsuscept ible", - "ic er", - "Ġв клÑİÑĩ", - "Ġp us", - "íĻ ĺ", - "E ng", - "Ġfant as", - "Ġspecific ation", - "Ġconfront ed", - "ĠColumb us", - "ив еÑĤ", - "ar ım", - "Ġcaffe ine", - "mun ition", - "Ġmig rants", - "l ide", - "it ations", - "ĠG eme", - "Ạ«", - "Ġpl anner", - "Ġstim ulate", - "Ġapro xim", - "ce u", - "ĠN om", - "Ġv og", - "ĠÑĢ аÑģÑĤ", - "Ġense ñ", - "Ġsell ers", - "Ġgut en", - "z d", - "C al", - "Ġdescri pt", - "Ġrecon ciliation", - "z inho", - "á¹ĩ a", - "ãģĺãĤĥ ãģĤ", - "acy j", - "ĠCO L", - "s aw", - "ĠíĻķ ìĿ¸", - "Ġvar it", - "Ġpartner ing", - "Ġdet ention", - "Ġbomb ing", - "c lapping", - "ien cies", - "ond u", - "AM E", - "Ġê°Ļ ìĬµëĭĪëĭ¤", - "c ÃŃa", - "ĠпоÑģ ÑĤо", - "ĠAS MR", - "Ġhome page", - "Ġsi è", - "an tha", - "ĠP oll", - "Ġ igen", - "cy ch", - "Ġê°ij ìŀIJ기", - "Ġconsider ably", - "ä»ĸ çļĦ", - "ĠAr ist", - "Ġwith stand", - "Ġqual itative", - "ĠK raft", - "ĠÑį лекÑĤ", - "ĠBe ad", - "екÑĤ ив", - "Ġcr ushing", - "ì³ IJ", - "Ġnav y", - "ÙĪ Úº", - "s ho", - "Ġo ak", - "ipp ers", - "Ġso ils", - "Ġpig ment", - "Ġev itar", - "ãĥ ĩ", - "Ġf use", - "ĠD ale", - ": \"", - "Ġcompl ètement", - "Ġke l", - "๠Ĩ", - "Ġqu atre", - "ĠU M", - "Ġë§ IJë", - "æł ¹", - "ÃŃ r", - "Ġle isure", - "ĠH ousing", - "Ġfold s", - "est ion", - "AR S", - "Ġm ash", - "urp ose", - "Ġaccum ulated", - "ĠSt uff", - "èª ŀ", - "Ġtap es", - "ĠÑģ илÑĮно", - "ĠLO VE", - "Ġ198 2", - "Ġsc ars", - "Ġcapital ist", - "ĠN ed", - "Ġsoft en", - "Ġnot ably", - "Ġforcé ment", - "ĠRa um", - "Ġнеоб Ñħод", - "Ġtrad emark", - "Ġfert ig", - "Ġ? 
!", - "æĹ ł", - "Ġreinfor ced", - "Ġre charge", - "ĠPut ting", - "Ġvill ains", - "Ġhand ic", - "Ġadvertis ement", - "ت ÙĬ", - "ĠÑģ Ñĥм", - "ĠR iley", - "×ķ× ij×", - "äº ¬", - "O s", - "Ø§Ø ²", - "B oy", - "Ġsqu ish", - "ock et", - "Ġtest ify", - "æ¼ Ķ", - "Ġ×ľ× ŀ×", - "Ġм аÑģÑģ", - "man uel", - "ĠArk ansas", - "if fe", - "Ġanalyst s", - "ĠDe af", - "Ġj ó", - "Ġgrocer ies", - "ĠWhe el", - "ĠÑĢ иÑģ", - "Ġc òn", - "ĠC ob", - "Ġpris ons", - "è ve", - "ĠCab inet", - "Ġpos ed", - "Ġguer re", - "ĠL loyd", - "Ġcl erk", - "Ġcr ises", - "ĠSh o", - "ĠO re", - "ĠFoot ball", - "ĠAd vis", - "ĠZh eng", - "è į", - "ĠAM Y", - "Ġun for", - "Ġmon aster", - "Ġcomp ile", - "Ġimm ortal", - "at able", - "Ġpar ano", - "Ġt iver", - "ĠStep h", - "ĠFu ÃŁ", - "Ġdisc ontin", - "Ġr ipe", - "Ġhack ing", - "Ġs iendo", - "Ġsegu ro", - "alt res", - "Ġand eres", - "Ġë ¦¬ë", - "Ġexp orts", - "æŃ ¥", - "Ġtab ii", - "Ġ기 ëĭ¤ë", - "Ġbother ing", - "Ġpick le", - "ĠBRI AN", - "Ġalt ar", - "ĠпÑĢи б", - "Ġtransfer ring", - "ĠV ors", - "ĠÙĩ ÙĪ", - "ĠZ a", - "ĠFr ances", - "Ġbrow se", - "em it", - "Ġche wing", - "ĠFred dy", - "Ġedit ors", - "ä lle", - "Ġí ĮĢ", - "ĠS que", - "ĠC ultural", - "aw k", - "ĠS ache", - "ĠCar bon", - "ắ t", - "F L", - "ĠN GO", - "pe ÅĤ", - "ĠS ou", - "Ġh vor", - "un intelligible", - "Ġë² ķ", - "Ġ °", - "i in", - "Ġ×¢ ×Ŀ", - "Ġder rière", - "Ġczy m", - "ĠAp ost", - "Ġregard er", - "Ġag rade", - "ĠC andy", - "Ġma re", - "Ġintrodu ces", - "bird s", - "Ġuniqu ely", - "Ġm uk", - "Ġcook er", - "Ġcrew s", - "Ġje ito", - "ER T", - "¶ Ħë", - "n isse", - "Ġe f", - "Ġcart e", - "ĠY ak", - "ĠP AT", - "и но", - "bok ki", - "Ġm ates", - "Ġdist int", - "Ġì½Ķë¡ľ ëĤĺ", - "Ġy ıl", - "Ġκ άν", - "Ġconfigur ations", - "eng a", - "re cht", - "H appy", - "ãĤĦ ãģ£ãģ¦", - "in vest", - "Ġreconst ruct", - "ĠÑįÑĤ омÑĥ", - "Ġmos que", - "ra um", - "Ġvoy ez", - "ĠN BC", - "ĠìŀIJ ìĭł", - "Ġstur dy", - "Ġк ап", - "Ġans ch", - "al id", - "Ġmas ih", - "ĠR EP", - "Ġì½ Ķë", - "Ġded uct", - "Ġsal ir", - "w urf", - "il ot", - "ĠM utter", - "old s", - "ĠF EMA", - "ĠB ib", - "Ġneighb oring", - "Ġbl iss", - "Ġíĺ ¼", - "ли ÑģÑĮ", - "ĠÑĤÑĢ еб", - "Ġ å°±æĺ¯", - "Ġgren ade", - "Ġe gal", - "Ġfin ely", - "Ġpet als", - "Ġke er", - "Ġch yba", - "Ġsk ipping", - "Ġth irteen", - "Ġgrav y", - "ĠS AT", - "6 1", - "Ġн ог", - "Ġmin s", - "IT E", - "Ġso zial", - "íķĺë ©´ìĦľ", - "rukt ur", - "Ġвозм ож", - "Ġоп ÑıÑĤÑĮ", - "Ġar th", - "ĠCub an", - "Ġtre asures", - "Ġfertil izer", - "Ġawak ening", - "Ġë°± ìĭł", - "Ġr all", - "Ġdep ict", - "ĠP ablo", - "Ġninete en", - "Ġw att", - "Ġentire ty", - "K S", - "ĠWood s", - "S ch", - "ĠÚ© ÙĪ", - "ĠD ry", - "ãģ ŀ", - "u ve", - "Ġreconst ruction", - "Ġanat omy", - "Īë ¥¼", - "Ġb aba", - "Ġlisten er", - "Ġshar pen", - "ĠPer u", - "ĠвÑĭ з", - "Ġrecre ation", - "Ġiniti ate", - "Ġcal or", - "ĠN aj", - "ge e", - "ĠFe els", - "ĠSnap chat", - "ĠT et", - "ĠN est", - "ĠD af", - "ĠFin ish", - "ĠÑĤак им", - "ú c", - "iz ens", - "Ġsp ins", - "Ġemb ry", - "Ġpass ages", - "Ġc ient", - "Ġjust ification", - "ä»ĸ 說", - "Ġolm az", - "Ġflood ed", - "Ġemo ji", - "Ġembr acing", - "Ġdisc ard", - "ĠBas ic", - "ag og", - "ĠìľĦ íķ´", - "Ġas ylum", - "er in", - "Ġf im", - "Ġnin ja", - "Ġautom ate", - "Ġaller gic", - "ÿÿ ÿÿ", - "am am", - "Ġм аÑĢ", - "ĠO i", - "ä us", - "Ġin duct", - "ĠB EN", - "Ġz ÅĤ", - "Ġkaż dy", - "ĠAM P", - "n ÄĽ", - "S ure", - "Ġqu il", - "Ġespe c", - "ro k", - "BS CRI", - "Ġlie be", - "p us", - "ach sen", - "Ġcr icket", - "ëĬ IJ", - "ĠFr ame", - "ekk ür", - "ar b", - "Ġp ÅĻ", - "иÑģ Ñģ", - "Ġzeg gen", - "Ġdou bles", - "ĠD re", - "t 
est", - "ins p", - "bo ys", - "Ġm ão", - "ĠVer se", - "Ġmus cular", - "ĠMA LE", - "Ġd ulu", - "Ġoccas ional", - "L o", - "conom ic", - "Ġv ak", - "Ġrem edy", - "å¤ ł", - "ĠâĻªâĻª âĻª", - "ve m", - "Ġön em", - "ĠkarÅŁ ı", - "ĠSh arp", - "h ur", - "Ġë°© ë²ķ", - "Ġgrand son", - "Ġakt iv", - "ĠTh rones", - "ĠìķĪ ìĹIJ", - "Ġto ts", - "Ġsub d", - "ĠPa ula", - "Ġgra ves", - "ĠB rent", - "Ġник ÑĤо", - "Ġsö z", - "Ġcre c", - "ĠVlad imir", - "çĸ «", - "Ġп ой", - "Ġ\" -", - "Ġp sy", - "at ri", - "id an", - "Ġa ún", - "Ġstandard ized", - "ì¹ ĺë", - "Ġк ÑĢов", - "ĠZh u", - "s omething", - "Ġ7 50", - "Ġmuj eres", - "Ġa it", - "éĹ ´", - "ag u", - "Ġcorrect ed", - "ik ka", - "el ed", - "ĠCare er", - "ow ym", - "Ġroomm ate", - "Ġdescend ants", - "ĠNapole on", - "ĠÐĶ о", - "íĸĪ ìĸ´ìļĶ", - "Ġbun un", - "ĠMich a", - "ç· ļ", - "Ġdesc ob", - "P I", - "Ġpalab ra", - "Ġtrack ed", - "Ġdepend ence", - "ĠBar ack", - "åģ ĩ", - "Ġfert ility", - "ĠSouth west", - "Ġincom plete", - "Ġcomun ic", - "Ġcomp ris", - "ĠRest aur", - "Ġac ron", - "κ α", - "Ġapprent ices", - "Ġmus st", - "ĠA br", - "Ġpent ru", - "ĠCons ort", - "ĠAve c", - "Ġdum plings", - "L R", - "Ġwszystk ie", - "Ġsw amp", - "н ев", - "ugg le", - "Ġwater color", - "Ġprot on", - "ĠEspa ña", - "ock ing", - "ов ал", - "Ġtak im", - "V ery", - "Ġdement ia", - "ĠÅŁey i", - "J ac", - "ĠMac Book", - "ĠL iv", - "ffic ients", - "ĠH unt", - "Ġover lay", - "æĦŁ 覺", - "ĠSky pe", - "p unkt", - "Ġconf ined", - "ĠAd rian", - "ر Ùĥ", - "ĠJe ep", - "Ġenqu anto", - "Ġan est", - "оÑĤ веÑĤ", - "Ġм енÑĮ", - "Ġirrig ation", - "á»ij n", - "Ġeight een", - "ĠP on", - "Ġresc ued", - "Ġ198 3", - "r ü", - "ja e", - "ĠJe ong", - "Ġamazing ly", - "ĠF DP", - "Ġback stage", - "c ue", - "ĠÏĥÏĦη ν", - "ĠاÙĦØ µ", - "Ġlivest ock", - "ĠW arner", - "Ġmaj ors", - "ãĥģ ãĥ£", - "Ġcooper ative", - "ĠBr ady", - "ra ined", - "rie b", - "Ġ×ij× ŀ×", - "Ġдов олÑĮно", - "ĠF E", - "Ġle aked", - "ĠMerc ury", - "Ġpersu ade", - "Ġtransform er", - "ĠNor weg", - "ĠìĹ¬ë Ł¬", - "Ġzrobi Äĩ", - "Ġcard iovascular", - "ĠCr ash", - "Ġg ossip", - "а ÑģÑĤÑĮ", - "Ġì ª½", - "Ġsw ept", - "ĠH orn", - "ĠAt é", - "Ġbu kan", - "ĠK aw", - "K Y", - "ĠSt ories", - "G ary", - "Ġgard ening", - "ĠQuick ly", - "ĠFal con", - "Ġov at", - "c ı", - "ĠCom plet", - "ĠD ate", - "ĠпÑĢ им", - "Ġlä uft", - "ĠAud rey", - "ĠW ent", - "Ġpel ÃŃcul", - "Ġcar riage", - "Ġun acceptable", - "ny mi", - "ĠÑģл ÑĭÑĪ", - "Ġter re", - "uell ement", - "EE EE", - "Ġpharm ac", - "h ões", - "Ġz ich", - "Ġmig rate", - "ĠF ry", - "ñ ana", - "ĠM uito", - "EO VER", - "Ġfort ress", - "ĠCom pan", - "ĠJ SON", - "ord nung", - "Ġw arto", - "Ġun gef", - "ìħĶ ìĦľ", - "ĠÑĢ ок", - "Ġpad dle", - "J ared", - "Ġsubm itting", - "Ġl atch", - "Ġf ug", - "Ġк оÑģ", - "ĠE f", - "Ġlaunch es", - "Ġf t", - "ote chn", - "Ġtrave lled", - "ا Ùģ", - "éģ ķ", - "Ġpro ch", - "Ġded im", - "8 3", - "Ġreb ound", - "ĠL U", - "p ath", - "ĠÑģп ÑĢав", - "Ġö l", - "ĠíĤ ¤", - "Ġpriv at", - "Ġtr actor", - "ĠAtt ention", - "S er", - "Ġcos es", - "á ria", - "p al", - "ĠìĿ Ģ", - "Ġsuccess or", - "Ġconnect ors", - "ĠÑĥÑģÑĤ анов", - "Ġgen ocide", - "Ġsufficient ly", - "ĠA ixò", - "Ġstabil ize", - "Ġcon gest", - "Ġcar ving", - "Ġz ost", - "ĠбÑĭ ÑģÑĤÑĢо", - "Ġshort est", - "Ġli vel", - "Ġ8 9", - "éģ Ĭ", - "Ġer k", - "Ġport raits", - "ॠĢ", - "è ĺ", - "bo at", - "ll ah", - "AN C", - "Ġempir ical", - "ĠE cho", - "ĠNeder land", - "è¿Ļ ä¹Ī", - "N et", - "Ġcuid ado", - "ĠR oma", - "Ġc alf", - "Ġgi ants", - "ĠExpl orer", - "ĠColl ect", - "al ition", - "ĠDest iny", - "Ġaus ge", - "ĠE du", - "ĠC lo", - "Ġear rings", - 
"ĠTr ack", - "ĠR OS", - "ĠBe lle", - "çĻ ¾", - "Ġpu eda", - "Ġday time", - "Ġsupp lier", - "ĠS V", - "ĠEx hale", - "Ġgal era", - "c ourse", - "Ġcent imeter", - "ĠB ast", - "m ud", - "Ġsang at", - "ĠPhys ical", - "Ġpriv ately", - "Ġtr ata", - "lyn n", - "ill i", - "Ġë© ĶìĿ´íģ¬ìĹħ", - "Ġcryst all", - "Ġpod s", - "ả n", - "in ator", - "ĠRec ords", - "å® ĺ", - "ÄŁim iz", - "isse ment", - "h are", - "h adow", - "ĠD K", - "ĠìķĮ ê³ł", - "Ġw yn", - "Ġrequest ing", - "ĠD onna", - "ĠìĹ ´ìĭ¬íŀĪ", - "ine a", - "Ġex ert", - "ĠDun can", - "Ġв еÑĩ", - "ĠH ah", - "ठĤ", - "ĠL if", - "ĠF inding", - "ĠNo v", - "Ġзн ак", - "Ġо ÑĦ", - "ĠQu è", - "Ġquarter back", - "ĠÑĦ ак", - "Ġbipart isan", - "ÄŁ in", - "Ġné cess", - "Ġrefer endum", - "Ġcomp iler", - "Ġprob abil", - "ед и", - "Ġtrad er", - "æĺ ĵ", - "ĠR um", - "ge me", - "Ġd io", - "ĠbÄĻdzie my", - "ĠÏĢ ά", - "ê¾ ¸", - "×ķ× ĺ", - "Ġठķ", - "Ġбл аг", - "Ġscal p", - "ĠPa use", - "Ġcapt ion", - "Ġend anger", - "Ġen lar", - "Ġrot ten", - "ãĥĥ ãĥĪ", - "Ġw ah", - "èĤ ī", - "Ġd zi", - "ĠInst all", - "A y", - "Ġcre ar", - "енÑĤ а", - "Ġwe ighing", - "Ġbutter flies", - "ĠG ast", - "äº ķ", - "h orn", - "war z", - "IC EOVER", - "Ġнай ÑĤи", - "Ġcoe fficients", - "ç°¡ åĸ®", - "ĠSp encer", - "ĠH igher", - "Ġcow ork", - "å¨ ĺ", - "ĠкоÑĤоÑĢ ое", - "Ġmon it", - "Ġdys function", - "ĠÑģÑĤ анов", - "Ġtour naments", - "Ġoy ster", - "B N", - "Ġtr ud", - "sl ow", - "ĠPen ny", - "ĠOd ys", - "æ r", - "Ġf ou", - "Ġenjoy ment", - "аÑĤ Ñĭ", - "Ġwygl Äħda", - "алÑĮ наÑı", - "ĠProt ect", - "Ġmo y", - "Ġcl aw", - "Ġsusp icion", - "Ġsacrific ed", - "Ġgost o", - "B ig", - "Ġaggress ively", - "Ġvor ne", - "ãĥ ł", - "Ġbl amed", - "ĠSe hr", - "פ ר", - "c ito", - "Ġse als", - "Ġmu jer", - "ĠWe ird", - "Ġfore ns", - "Ġcontrib utes", - "est ra", - "Ġp og", - "L OL", - "Ġhacer lo", - "о ÑĤÑĮ", - "f iction", - "7 9", - "λ ο", - "大 æ¦Ĥ", - "å£ °", - "ĠÑĤ об", - "ĠG S", - "ĠCl ara", - "ite z", - "Ġadvoc ating", - "ĠíĶ Ħë", - "s ung", - "Ġvert ices", - "Ġnavig ating", - "Ġeurop é", - "çļ Ĩ", - "Ġslow ed", - "Ġfore ground", - "ĠIndust rial", - "Ġad ore", - "ìĭ Ń", - "Ġcré er", - "æŀ Ĺ", - "chn itt", - "Ġun aware", - "Ġcur ly", - "ent ar", - "Ġl er", - "Ġprohib ited", - "ĠHero es", - "ĠRe ed", - "u ca", - "Ġsm ok", - "Ġkun na", - "zeit ig", - "im men", - "ĠL un", - "Ġаб ÑģолÑİÑĤ", - "Ġdeg li", - "Ġvill agers", - "Ġpres et", - "z ept", - "ud s", - "Ġem it", - "ä½ł è¦ģ", - "Ġë ī", - "ëĬĶ ì§Ģ", - "нак о", - "Ġos ób", - "Ġ196 9", - "ĠÐIJ ÑĢ", - "Ġman chmal", - "ĠBro ck", - "Ġmant ra", - "ĠW IL", - "b ach", - "in ä", - "el as", - "kel n", - "Ġdisci ple", - "Ġqual c", - "Ġde hyd", - "ìĿ´ë Ŀ¼ëĬĶ", - "A f", - "ìĦ± ìĿ´", - "R yan", - "Ġpupp et", - "ĠдÑĢÑĥг ие", - "Ġr ud", - "Ġp ending", - "P lus", - "ĠìķĬ ìĿĦ", - "Ġb á»ĭ", - "ĠSe ga", - "ç e", - "Ġprogram mer", - "b li", - "Ġun l", - "Ġensl aved", - "Ġsoci été", - "Äģ h", - "Ġinherit ance", - "ĠBang l", - "erm aid", - "Ġpractition er", - "ĠSt alin", - "ĠUs er", - "ci ble", - "Ġcard iac", - "ĠKore ans", - "Ġdump ed", - "Ġ×Ķ ×Ļ×Ķ", - "á is", - "Ġhydraul ic", - "oubt edly", - "ĠP it", - "Ġpic nic", - "Ġbehö ver", - "ĠÑģм ог", - "Ġbra king", - "é» ij", - "ut ar", - "ĠìĦ ¸ë", - "ub l", - "Ġü z", - "Ġmaj esty", - "Ġb ers", - "ut able", - "Ġhot ter", - "çħ §", - "ÛĮ ÙĨ", - "Ġbi ases", - "Ġsubject ed", - "Ġnaught y", - "Ġcir cus", - "ãģĹ ãģĭ", - "ĠIm medi", - "ĠSte fan", - "ĠTri ple", - "en k", - "Ġw it", - "Ġrecy cle", - "em ie", - "d ated", - "Ġun load", - "Ġpop ula", - "ch in", - "Ġyield s", - "Ġeng lish", - "ĠBon nie", - "Ġsp iders", - "à ģ", - "Ġer osion", - "éĥ¨ 
åĪĨ", - "ĠN ICK", - "иÑı Ñħ", - "Ġimp art", - "Ġк ни", - "Ġres olutions", - "Ġlith ium", - "Ġconver gence", - "ĠT ara", - "Ġдв е", - "th s", - "ĠCind y", - "æĪij è¦ģ", - "å¹ «", - "ĠD IE", - "Ġass urance", - "Ġоп иÑģ", - "Ġbu ckets", - "Ġc ues", - "ĠQu iet", - "Ġsimilar ity", - "Ġfound ational", - "ĠMin ist", - "æ» ¿", - "Ġp ian", - "Ġcent r", - "Ġnum b", - "Ġmon ks", - "uj ourd", - "en zie", - "Ġskate board", - "Ġd latego", - "ĠÑģ оÑĤ", - "ĠA E", - "Ġmaster piece", - "ĠSol omon", - "ĠRed dit", - "Ġr iot", - "ab l", - "ĠJ azz", - "Ġelectromagn etic", - "Ġinsec ure", - "ĠComp et", - "ger ies", - "об од", - "ł ×ķ", - "ðŁ Ĵ", - "Ġsen ators", - "ĠBris bane", - "ĠAl b", - "utter ing", - "ĠAll ow", - "z ero", - "Ġp ai", - "ĠÐIJ лекÑģ", - "ĠDis play", - "ĠBl ade", - "ĠApp s", - "Ġp ä", - "Ġд еÑģÑı", - "Ġque lla", - "ĠGa o", - "ен нÑĭÑħ", - "Ġspoil ers", - "Ġgall ons", - "ĠÙĦ ÙĬ", - "ĠZ ion", - "æľī ä¸Ģ", - "on ie", - "rag t", - "ĠCh and", - "Ġë³ ij", - "Ġbl unt", - "Ġus u", - "ĠK ad", - "ra kt", - "Ġcin ematic", - "Ġam munition", - "re ne", - "Ġfour teen", - "ĠC arn", - "c rit", - "Ġten ure", - "v u", - "Ġprincipal mente", - "Ġalle en", - "éĢĻ ä¸Ģ", - "Ġkompl ett", - "Ġdü ny", - "J ames", - "Ġrecept or", - "Ġones elf", - "g uru", - "Ġmerch ant", - "l iness", - "Ġover looked", - "Ġharmon ic", - "éķ ¿", - "ies o", - "×ķ× ŀ", - "col m", - "ĠпÑĢо екÑĤ", - "ĠAd a", - "ا س", - "T im", - "Ġrecur ring", - "Ġproceed s", - "ĠPart icularly", - "ĠDown load", - "et rical", - "Ġmat rices", - "Ġproyect o", - "anc ies", - "ĠUh m", - "Ġc aves", - "Ġìĸ´ë ł¤", - "ĠLe af", - "Ġоб ÑĭÑĩ", - "ĠìĿ´ì ľł", - "Euro pe", - "Ġt Äħ", - "Ġpul s", - "Ġtak iego", - "ÐĿ е", - "G U", - "Ġfor s", - "Ïģ γ", - "Ġfot os", - "Ġ) )", - "Ġë© ¤ë", - "Ġaqu ilo", - "ĠK urd", - "ï¸ ı", - "pt ic", - "ĠD ort", - "Ġmis ery", - "aus o", - "åĬ Ł", - "chuck ling", - "ĠR idge", - "ĠíĸĪ ìĬµëĭĪëĭ¤", - "Ġ* **", - "å® ¢", - "ĠHmm m", - "Ġge ographic", - "Ġany s", - "Ġtal vez", - "Ġske let", - "Ġsign atures", - "Ġlit ers", - "IJë ©´", - "ĠÑģво его", - "Ġski ing", - "ĠÐľ оÑģ", - "Ġadop ting", - "Ġha ft", - "Ġsymm etric", - "ĠL iqu", - "Ġthy roid", - "Ġmis in", - "lud e", - "Ġh ull", - "ĠX D", - "ĠG ust", - "ze ich", - "Ġvibr ations", - "Ġes emp", - "ĠвÑģ Ñİ", - "ĠQu em", - "Ġü brig", - "ĠS ke", - "ĠLyn ch", - "room s", - "art et", - "f est", - "Ġfr üher", - "Ġl ure", - "ä¸į好 æĦıæĢĿ", - "ĠìķĮ ìķĦ", - "ĠW IN", - "ĠR YAN", - "ĠкоÑĤоÑĢ ÑĥÑİ", - "ĠK ash", - "Ġ×Ķ× ŀ", - "Ġsaf eg", - "ĠHall elujah", - "Ġдв ÑĥÑħ", - "Ġstap le", - "Ġsed iment", - "ĠAct s", - "Ġbl aming", - "Ġmain land", - "Ġsport ing", - "Ġdecor ations", - "Ġexecut ing", - "Ġpar an", - "ĠDoll ar", - "Ġproject ions", - "Ġcommission ed", - "Ġb our", - "ö m", - "Ġste amed", - "ĠëŃ ĺ", - "Ġpet rol", - "Ġcel ular", - "å¸ ¶", - "ĠHung ary", - "Ġrent ed", - "Ġв аÑĢи", - "bb ie", - "Ġsé cur", - "ü ll", - "Ġsw ings", - "bet ween", - "Ġи ÑĤ", - "est ro", - "Ġnie mand", - "ĠìĤ ¼", - "ĠP ardon", - "ess es", - "ĠM ID", - "Ġcentral ized", - "ĠAl ien", - "cul os", - "Ġcr ise", - "裡 éĿ¢", - "Ġcl asse", - "beit et", - "i ÄŁi", - "Ġwh ales", - "Ġper imeter", - "Ġty ing", - "Ġstr ony", - "Ġlike wise", - "ĠP unch", - "D a", - "ĠBapt ist", - "Ġsort ing", - "Ġ iv", - "Ġíķ ©", - "Ġre hab", - "Ġet a", - "ri ver", - "Ġsa i", - "ãģĦãģŁ ãģł", - "od us", - "ãģĬé¡ĺãģĦ ãģĹãģ¾ãģĻ", - "Ġess ayer", - "Ġtur tles", - "ĠHaz rat", - "Ġfab rics", - "Ġcav ity", - "Ġpon ieważ", - "Ġschle cht", - "Ġs alsa", - "ÅŁ ekkür", - "Ġse ating", - "Ġeconom ists", - "Ġman g", - "Ġsegu inte", - "Ġr ang", - "Ġrat ios", - "Ġconst ell", - "Ġlong 
temps", - "u ating", - "Ġspo iled", - "Ġrecip ients", - "Ġsn iper", - "ä¹ĭ åīį", - "ìĬµ ëĭĪê¹Į", - "Ġw p", - "ĠLIN KE", - "Ġfl are", - "ĠAd ri", - "ñ as", - "Ġback l", - "mä ÃŁ", - "ĠB end", - "Ġworkload s", - "ĠÑģ Ñĥп", - "Ġ197 5", - "им ÑģÑı", - "ан е", - "Ġм он", - "Ġaspir ations", - "ĠA er", - "ĠговоÑĢ иÑĤÑĮ", - "ĠQ ian", - "å¦ Ī", - "Ġcomprom ised", - "Ġyol k", - "ла ÑģÑĤ", - "Ġhe men", - "ro ve", - "d ens", - "Ġком менÑĤ", - "Ġ- --", - "Ġflu ores", - "но Ñģ", - "ĠLiver pool", - "ĠÑģоб ой", - "ĠZ we", - "Ġl umin", - "ĠO G", - "á ¸", - "hol m", - "pro fits", - "S N", - "Ġproport ions", - "Ġm ica", - "ĠB oh", - "ĠAt las", - "Ġuns ure", - "Ġtour ing", - "Ġn ied", - "Ġt ÄĻ", - "Ġimper ative", - "Ġdem ek", - "ĠSher iff", - "r ance", - "Ġhom eland", - "ĠH ail", - "ĠG anz", - "y mm", - "M on", - "åĨ ·", - "v ida", - "Ġdesar roll", - "æĬ Ģ", - "Ġintrig uing", - "ĠH ugo", - "Ġ ãĤĤ", - "é ¬", - "а ÑĨ", - "ĠWiÄĻ c", - "att ed", - "ĠìķĦëĭĪ ê³ł", - "ĠV ari", - "á d", - "Ġsur real", - "Ġdispar ities", - "Ġm ó", - "ull en", - "ĠìŀĪ ëĭ¤ê³ł", - "Ġп ожалÑĥйÑģÑĤа", - "Ġma ins", - "Ġe ject", - "Ġmeth ane", - "Ġmarginal ized", - "Ġchill i", - "r ès", - "Ġy em", - "ä½ł æĺ¯", - "ĠCh un", - "Ġdeb ts", - "Ġdownload ing", - "ĠAth ens", - "is ierung", - "ry n", - "Ġte kn", - "ĠQu indi", - "éľ Ģ", - "Ġtara f", - "Ġh é", - "Ġconscious ly", - "Ġfix es", - "uck le", - "may ın", - "Ġfre i", - "Ġsp a", - "Ġì§Ħ íĸī", - "ĠاÙĦØ °", - "ĠÑĥ к", - "let t", - "Ġolm uÅŁ", - "Ġche esy", - "า à¸ģ", - "na ire", - "Ġw iden", - "Ġli en", - "Ġesca ping", - "igg s", - "ĠBl ick", - "c Äħ", - "ĠìĦ ľë", - "Ġ×Ķ× ¡", - "Ġв пеÑĢ", - "oph one", - "ie ll", - "ĠSU BSCRI", - "Ġl ions", - "Ġê·¸ ê²ĥ", - "Ġinsp ires", - "Ġguarante es", - "Ġcome ça", - "ĠGrow ing", - "Ġneg lig", - "ĠFrank f", - "Ġge geben", - "ĠÄij ầu", - "Ġend lich", - "Ġì į¨", - "ĠT T", - "ĠL ith", - "ÏĢ α", - "aster n", - "ĠA zer", - "Ġlun ar", - "h ic", - "Ġна ÑĢод", - "Ġnen hum", - "è· ij", - "ĠSalv ador", - "ĠPro gress", - "Ġprivile ges", - "ĠëıĻ ìķĪ", - "Ġant agon", - "ĠImp f", - "Ġdesc ub", - "ĠLe i", - "ĠìĥĪë ¡ľ", - "Ñĩ е", - "Ġdó lares", - "ĠMeg han", - "ĠW ire", - "to o", - "ay ing", - "us c", - "Ġt ud", - "Ġappe als", - "ed uc", - "Ġp ane", - "Ġj i", - "Ġde cks", - "ĠAl ter", - "Ġ å°±", - "ìĦ ¤", - "åĪĨ éIJĺ", - "Ġproduct ions", - "ĠWILL IAM", - "Ġimpl ied", - "Ġfulfill ment", - "ĠA ah", - "Ġsa ja", - "x us", - "ĠÎļ αι", - "Ãł s", - "uc ch", - "ок о", - "ĠDisc ord", - "ĠS Y", - "j sk", - "ĠWall ace", - "un ction", - "Dan iel", - "Ġk öt", - "ij ah", - "Ġmarch e", - "Ġdis gr", - "Ġm ungkin", - "Ġal ma", - "³ µ", - "Ġextensive ly", - "ĠFl oren", - "ĠAll ison", - "ãĤ ±", - "ÙĬ Ùħ", - "Ġju ven", - "ĠRena issance", - "Ġfundra ising", - "ĠCha os", - "Ġpar aly", - "Ġnarr ator", - "Ġecosystem s", - "A sh", - "Ġmitig ation", - "ĠA ujourd", - "ĠIde e", - "! 
,", - "Ġ ½", - "Ġland lord", - "Ġdefect s", - "Ġac re", - "uls ive", - "Ġalg ae", - "pe k", - "Ġem ba", - "ĠR oc", - "éĽ ¢", - "ks om", - "ä che", - "Ġle uk", - "Ġlever aging", - "Ġê·¸ëłĩ ì§Ģ", - "ĠPal m", - "Ġä ven", - "Ġl is", - "ĠIn sp", - "ĠR ita", - "ĠAb b", - "ith m", - "Ġsuper vision", - "Ġrevis it", - "Ġpi ÄĻ", - "Ġeu h", - "Ġf ades", - "Ġmot to", - "åį ¡", - "ез ж", - "ĠSh im", - "Ġrelev ance", - "Ġo o", - "Ġo stat", - "n ica", - "Ġcho ix", - "ĠFac ulty", - "Ġì¤ij ìĹIJ", - "ĠAb ove", - "Ġнеб олÑĮÑĪ", - "Ġsequ encing", - "Ġnutri ent", - "Ġconqu ered", - "Ġdigest ive", - "Ġback drop", - "ĠL ori", - "ail able", - "G ame", - "Ġneglect ed", - "om orph", - "ill ah", - "Ġkn e", - "Ġsi itä", - "Ġworks pace", - "ĠVen ice", - "ĠK ne", - "Ñī о", - "ħ Ģ", - "ĠH ass", - "Ġv ita", - "Ŀ¼ë ©´", - "Ġlay s", - "ên cias", - "é rica", - "ĠL l", - "æ± Ĥ", - "ĠCo ca", - "ĠWH Y", - "èĪ ŀ", - "Ġrout ing", - "Ġperm issions", - "Ġd ings", - "pre nd", - "pro gram", - "Ġcro cod", - "br al", - "AAAA AAAA", - "ag it", - "ĠN ä", - "Ġgek ommen", - "at ten", - "Ġrefer enced", - "Ġpair ing", - "ĠPart ner", - "ĠCoron avirus", - "Ñĸ Ñģ", - "è½ ī", - "Ġ×Ķ× ĵ", - "Ġespec ÃŃfic", - "ars i", - "qu elle", - "Ġspont aneous", - "çĨ ±", - "Ġê²ĥ ìĿĦ", - "ĠÐŁÐ¾Ñģ ле", - "ĠاÙĦ د", - "ĠSh out", - "Ġн ал", - "Ġdisgu ise", - "ĠJ ord", - "Ġwe e", - "Ġmiej sc", - "Ġser um", - "Ġplais ir", - "Ġcred ible", - "Ġb Ã¥", - "ĠA J", - "ma res", - "Ġrod s", - "Ġer an", - "ãģ¾ ãģĤ", - "Ġp ää", - "ĠU A", - "ĠUn known", - "ĠÙĦ Ùħ", - "ĠRab bi", - "Ġla at", - "Ġhairst yle", - "ĠØ º", - "éģ ĭ", - "Ġc ach", - "ĠWr iting", - "оÑĩ ки", - "ab ad", - "Ġstraight en", - "-- \"", - "w ife", - "Ġhott est", - "Ġpun ya", - "ĠF ashion", - "gr iff", - "ĠQ R", - "ot ch", - "ĠÐľ ожеÑĤ", - "Cl oud", - "ĠStri ke", - "ĠHe in", - "Ġ 羣çļĦ", - "Ġle i", - "ĠFl ow", - "weg s", - "Ġha br", - "åīĽ åīĽ", - "nah me", - "Ì ģ", - "Ġple asing", - "op ping", - "Ġ구ë ıħ", - "Ġdr an", - "Ġbang s", - "Ġ7 9", - "Ġsk et", - "Ġcav al", - "ĠMac ron", - "Ġweight ed", - "Ġm uted", - "Ġnuest ras", - "EE P", - "Ġmath ematic", - "ĠM RI", - "ag us", - "Ġtherap ies", - "θ ε", - "Ġun pl", - "Ġcomm encer", - "f ull", - "Ġtow els", - "Ġpr ue", - "Ġlic enses", - "׼ ×ķ׾", - "ĠÐŁ оÑĩемÑĥ", - "Ġpoint less", - "B ye", - "Ġelig ibility", - "Ġscra pe", - "Ġab usive", - "ĠM ant", - "Ġje unes", - "t al", - "ĠPrin cip", - "ĠOrth odox", - "Ġmel od", - "ĠмаÑĤ еÑĢи", - "Ġprosecut or", - "Ġopio id", - "ĠÑĥ веÑĢ", - "ĠBe en", - "Ġìłij ì¢ħ", - "Ġd ynasty", - "Ġajud a", - "Ġent reg", - "Ġweigh ed", - "Ġe ure", - "ĠB em", - "Ġab normal", - "8 2", - "ĠJ R", - "ĠA kt", - "ĠB ri", - "ú t", - "Ġst agn", - "! *", - "Ġwe gen", - "Ġle aking", - "ĠW ords", - "ĠM au", - "Ġv ue", - "ĠL iam", - "ани ем", - "Ġclin icians", - "ĠP ump", - "Ġför st", - "? 
...", - "Ġautom otive", - "ĠOw en", - "zus agen", - "ĠH undred", - "Ġdecentral ized", - "Ġbul bs", - "Ġ×ľ× Ľ", - "Ġprovin ces", - "ĠMil an", - "8 1", - "k as", - "Ġëĵ £", - "Ġfor ça", - "Ġright ly", - "å³ ¶", - "r Äħ", - "Ġven ues", - "Ġw ai", - "Ġpred icting", - "ĠWi Fi", - "Ġê¶ģ ê¸Ī", - "ر ÙĪ", - "Ġ×Ķ× ĸ", - "cent ury", - "Ġgrad ual", - "ĠProblem e", - "ĠìĹ ħ", - "Ġcop ing", - "ĠBr us", - "Ġpean uts", - "irts chaft", - "Ġз ал", - "ĠT roy", - "Ġsper m", - "ĠM itar", - "ĠTür kiye", - "g rand", - "¦ Ń", - "Ġ×ŀ× ¡", - "Ġp ans", - "ĠKnow ledge", - "ber ly", - "ĠÐķ го", - "Ġdan ced", - "ĠFr ost", - "ĠB urg", - "Ġbit ing", - "ìłķ ìĿĦ", - "me al", - "Ġhero ic", - "Ġmother board", - "ĠL icht", - "ãģ£ ãģ", - "ll an", - "ай н", - "ĠÑĢ Ñıд", - "Ġ à¹Ģà¸", - "on en", - "ir ie", - "Ar t", - "r ang", - "ν η", - "Ġnew born", - "Ġam is", - "Ġا ÙĪر", - "Ġsoph om", - "ĠCare ful", - "Ġprospect s", - "ens en", - "Ġthr ill", - "ĠVi á»ĩt", - "A dam", - "r ition", - "ent ric", - "ud en", - "Ġcertific ates", - "Ġas hes", - "èª ¿", - "play ing", - "Ġs adece", - "Ġo st", - "Ġairpl anes", - "ÑĢ ок", - "on er", - "Ġmagnes ium", - "Ġgod damn", - "Ġ197 2", - "ĠSch ule", - "Ġtem at", - "Ġpart out", - "௠Ĥ", - "Ġin ve", - "ĠScient ists", - "ĠHud son", - "win ning", - "ceks in", - "Ġcongress ional", - "or u", - "Ġro pes", - "в ед", - "Ġmad re", - "Ġf erry", - "ĠCoh en", - "ĠP red", - "Ġvag y", - "Ġб еÑģп", - "Ġmult im", - "Ġdrain age", - "Ġsim ulator", - "g iggles", - "ĠSt adium", - "об Ñī", - "Ġnot ices", - "Ġcraw ling", - "Ġgr oupe", - "åı ¸", - "Ġkto ÅĽ", - "ĠY oga", - "Ġmed ida", - "ĠÑħ ваÑĤ", - "ĠL ite", - "Ġr av", - "or ama", - "Ġdisc ord", - "ĠDI RE", - "Ġte h", - "ĠN urs", - "ç² ī", - "Ġpitch ed", - "Ġbark ing", - "ĠC oke", - "wi ad", - "Ġpop ulated", - "éĻ ¤", - "pe lled", - "Ġб ог", - "Ġpe wno", - "ĠC ube", - "Ġrecru ited", - "éĢĻ 種", - "ĠC ara", - "ıģ ını", - "im ated", - "ĠÑĪ кол", - "ic ional", - "ĠпÑĢо ÑĦ", - "Ġcontam ination", - "Ġúlt imos", - "Ġfear ful", - "Ġele phants", - "us i", - "ĠiT unes", - "ĠSw ami", - "ê ¼", - "ĠìĦ¤ë ªħ", - "ĠRich ards", - "Ġmagn ets", - "ĠRicht ung", - "ĠLeg ion", - "èı ľ", - "Ġk itty", - "Ġkiss ed", - "Ġwater ing", - "Ġcon o", - "ĠPalest ine", - "id ir", - "Ġma ze", - "Ġflu ids", - "ĠProdu cer", - "ĠKr sna", - "好 åķ¦", - "la f", - "Ġ×IJ ×ķ", - "Ġm iesz", - "ĠX ing", - "oint ed", - "se in", - "ĠF uk", - "ĠDep ression", - "ĠD uty", - "ĠPan ther", - "Ġsu nd", - "Ġref ere", - "Ġexc lusion", - "Ġnav al", - "ĠWin ston", - "Ġsl ogan", - "Ġhypoth etical", - "Ġelev ate", - "ë ł¹", - "Ġcabe ça", - "ĠGes und", - "m eter", - "ĠìķĦëĭĪë ©´", - "Ġcloud y", - "âĢ¦ ?", - "ĠSch ritt", - "ĠJ S", - "ì į", - "ĠSpr ings", - "ĠB atter", - "· °", - "Ġtail or", - "ĠPTS D", - "ĠG ent", - "Ġba ÄŁ", - "Ġspat ula", - "Ġcr ay", - "ĠLeg isl", - "Ġs ú", - "Ġle ve", - "า ม", - "Ġer ad", - "Ġdon g", - "Ġd erm", - "ĠBank s", - "ich o", - "åħĪ çĶŁ", - "ĠFr anz", - "ra vel", - "éģ Ķ", - "ол о", - "Ġfl ute", - "ĠE k", - "Ġjoy ful", - "Ġch ased", - "ĠLar ge", - "O ver", - "Ġentrepreneur ial", - "Ġcons iders", - "Ñĥ ем", - "op a", - "Ġdorm ir", - "ĠElement ary", - "Ġprzy pad", - "ÑĥÑģ ка", - "ĠоÑĩ еÑĢ", - "ug ene", - "Ġten ido", - "Ġlug ares", - "ë ¥", - "ĠÑĩ аÑģÑĤ", - "Ġsa o", - "Ġbra id", - "ĠV ere", - "ĠRe ich", - "ĠP oss", - "Ġin an", - "w and", - "re f", - "Ġmont rer", - "Ġ198 1", - "çķ ª", - "as ında", - "Ġch rome", - "ĠTr inity", - "Ġexplo itation", - "ĠS ense", - "ĠC MS", - "ĠNo ble", - "ĠìĦł íĥĿ", - "Ġswe lling", - "elect ronic", - "] ?", - "Ġbr ushing", - "Ġliquid ity", - "ĠH ook", - "ĠCon nor", - "ĠAl um", - 
"Ġgu cken", - "su ite", - "Ġwie le", - "Ġbarrel s", - "ĠReg el", - "ĠM ent", - "ĠT rip", - "ĠBr ush", - "ĠE rik", - "ur ate", - "ÉĻ r", - "ĠC yr", - "ou ble", - "ĠBe cca", - "Ġpass words", - "Å ±", - "bor g", - "Ġv endo", - "ĠCla us", - "ĠF az", - "ind est", - "Ġdece ased", - "Ġcompar isons", - "ĠL CD", - "ĠP ork", - "Ġevent ual", - "Ġpat reon", - "Ġin ability", - "Ġext inction", - "Ġì¢ĭìķĦ íķĺëĬĶ", - "ĠÑģ оÑģ", - "aj u", - "Ġ×ij× IJ×", - "Ġso fort", - "Ġdest ined", - "ĠR in", - "Ġmouth s", - "ĠNat ürlich", - "Ġpres erving", - "Ġlim p", - "é» ¨", - "oc used", - "ин г", - "Ġexp osing", - "ĠÎ ¾", - "ë į", - "la ugh", - "Ġhis s", - "ãģł ãģĭãĤī", - "Ġind ie", - "Ġdet al", - "ÑĢав ÑģÑĤв", - "Ġtr ên", - "æķ °", - "Ġog ni", - "Ġsimple mente", - "Ġ197 8", - "Ġgo o", - "Ġ196 7", - "Ġgen ug", - "h ö", - "Ġhist ó", - "å® Ł", - "Ġlob ster", - "c endo", - "Ġte il", - "Ġalle vi", - "00 00", - "OL D", - "Ġpes os", - "Ġbon uses", - "Ġam i", - "Ġrev ival", - "ĠHor se", - "Ġs ack", - "T alk", - "Ġmul her", - "ĠпоÑģÑĤо Ñıн", - "ĠH ood", - "H uh", - "Ġë¶ ģ", - "Ġhy ung", - "ĠMe eting", - "Ġimport a", - "Ġì°¾ ìķĦ", - "ĠV ern", - "Ġstri pped", - "Ġref uses", - "Ġqual ifications", - "op l", - "Ģë ıĦ", - "ix ÃŃ", - "Ġdi ab", - "it ime", - "fl ows", - "Ġin ac", - "ĠG ong", - "Ġmeaning less", - "Ġcourage ous", - "Ġmicro bi", - "az y", - "h ist", - "Ġvolunte ering", - "V IE", - "Ġviol ated", - "Ġsymp athy", - "ĠEd it", - "好 åĥı", - "elect ric", - "produ ct", - "Ġpand emia", - "Ġgeomet ric", - "ĠCon vers", - "g re", - "Ġgl ut", - "ist ed", - "ĠاÙĦ Ùĥ", - "ĠCh ain", - "ĠPres ent", - "ĠY in", - "ĠÑģ ог", - "ĠV log", - "Ġìĸ´ë ¨¸", - "Ġdon n", - "Ġh itch", - "uck ing", - "ãģĬ ãģĦ", - "w ald", - "ris k", - "Ġhar i", - "ĠK ens", - "ĠId ol", - "Ġвним ание", - "Ġtod d", - "Ġsm ashed", - "Ġinv ari", - "Ġкон ÑĤÑĢ", - "Ġaut istic", - "ìŀ¥ ëĭĺ", - "R es", - "д Ñĭ", - "ch au", - "Ġsel v", - "Ġhät ten", - "ठ¿", - "Ġexpect s", - "Ïģ η", - "Ġaç ık", - "ĠHT TP", - "le ÅŁ", - "Ġswe eping", - "ĠBet a", - "Ġcounterpart s", - "ab ile", - "ĠSim s", - "C s", - "Ġrep ar", - "s qu", - "Ġprovin cial", - "Ġshare holders", - "Ġrun ter", - "Ġged acht", - "ĠTe en", - "Ġgrand s", - "çĶ ¢", - "ag les", - "Ġrock y", - "ven s", - "Ġr ivals", - "un al", - "Ġreact s", - "ë ©", - "Ġmerc ury", - "ĠLu igi", - "Ġо г", - "ĠJ UST", - "Ġl od", - "Ġcort ex", - "w ig", - "Ġl akh", - "ì¤ij ìĹIJ", - "ĠV ic", - "ĠM und", - "Ġma pped", - "ĠD ell", - "ĠD ruck", - "Ġlif es", - "алÑĮ ное", - "ivid ual", - "ad ım", - "Ġat rav", - "ĠFl ug", - "ĠKle in", - "ê±° ìķ¼", - "ห à¸Ļ", - "Ġapp li", - "ா ?", - "ü yorum", - "ĠинÑĤеÑĢеÑģ но", - "Ġdis infect", - "> -", - "Ġchamp agne", - "Ġk la", - "op ers", - "Tr ans", - "ĠDes ert", - "Ġcultiv ate", - "ĠFuck ing", - "idel ity", - "ĠÑĤ ан", - "Ġinc ub", - "Ġtem u", - "Ġlearn er", - "found er", - "ĠSy l", - "ãĤ Ģ", - "Ġf ato", - "z ier", - "ĠìĹĨ ìĿ´", - "ĠìĪ ¨", - "Ġpsych o", - "ĠÑĤел еÑĦ", - "Ġregard e", - "Ġrepresent ations", - "Ġlit igation", - "Ġsp ann", - "ult s", - "b ior", - "è¦ĭ ãģ¦", - "ä¸į å¤ļ", - "ĠSur vey", - "ĠLED s", - "Ġtr ä", - "Ġl ên", - "Ġant ioxid", - "еÑĢ ом", - "Ġindu ction", - "Ġfool ed", - "ät zlich", - "ĠговоÑĢ ÑıÑĤ", - "ĠF act", - "umb ai", - "Ġw iggle", - "NO UN", - "Ġdévelop p", - "ĠCl aro", - "Ġì ¸", - "ë ¬", - "ãģªãĤĵ ãģł", - "Ġaccum ulate", - "Ġmaint ains", - "ë Ħ", - "ĠFight er", - "íĨ ł", - "Ġmat in", - "Ġcoup on", - "Ġst unt", - "Ġdeb uted", - "å¾ħ ãģ£ãģ¦", - "Ġpra g", - "ив аем", - "7 3", - "Ġexp res", - "Ġìĺ¤ë ¹ł", - "ĠпеÑĢ Ñģон", - "Ġcalcul us", - "Ġab rupt", - "ĠInspect or", - "our t", - "æĸ Ļ", - 
"ź niej", - "int ense", - "B a", - "Ġl ounge", - "Ġast hma", - "ĠHi ç", - "ª »", - "Ġeditor ial", - "Ġse ize", - "Ġk ır", - "Ġm ouve", - "Ġtier ra", - "Ġtestoster one", - "Ġr h", - "ĠKing ston", - "EL LE", - "ĠRepresent ative", - "Ġ197 4", - "Ġi ba", - "T s", - "Ġsort a", - "Ġ( ?)", - "Ġت ÙĪ", - "ĠëĤ´ë ł¤", - "Ġbek ommt", - "Ġspirit ually", - "Ġdist orted", - "M ad", - "Ġre im", - "á nh", - "ĠOtt oman", - "ĠRel ig", - "ĠEl s", - "Ġret ained", - "ĠLa ughs", - "æĢ »", - "ĠS AS", - "ĠколиÑĩе ÑģÑĤво", - "×ķת ר", - "Ġinnov ate", - "Ġk ork", - "ĠÑĢаÑģÑģк азÑĭв", - "ond ere", - "iv i", - "ay e", - "ount y", - "ĠполÑĥÑĩ аеÑĤÑģÑı", - "Ġbun s", - "åħ «", - "Ġyüz den", - "Ġsur geries", - "Ø£ ÙĨ", - "Ġbankrupt cy", - "w elt", - "Ġsi amo", - "Ġdark est", - "ĠH ann", - "gg a", - "Ġform as", - "ĠD j", - "n amed", - "Ġshield s", - "ue ller", - "ĠF ew", - "Ġl ace", - "Ġfur ious", - "ĠY U", - "Ġsociet al", - "Ġjudge ment", - "ĠD os", - "Ġj ab", - "law s", - "Ġrein vent", - "ĠK atherine", - "ĠCh oi", - "ad ows", - "Ġr ans", - "od en", - "ĠMid west", - "n ın", - "Ġdep ort", - "ĠD ip", - "ç´ ħ", - "Ġaten ción", - "ĠCourt ney", - "ivid ad", - "ĠÚ© Ûģ", - "Ġeffic acy", - "ĠBrook s", - "Ġrefer ral", - "Ġкон ÑĨ", - "Ġmal icious", - "Ġk ir", - "ĠGod dess", - "Ġfun ky", - "Ġinter im", - "ĠK örper", - "Ġìĸ¼ë §", - "k ur", - "Ġк ли", - "Ġtruc s", - "ges etz", - "Ġz ug", - "ĠGl ück", - "ĠMin ute", - "Ġprest igious", - "Ġnie z", - "Ġconcent rations", - "ла ÑģÑĤи", - "ĠS is", - "ĠVit amin", - "ko v", - "ĠP BS", - "Ġне е", - "Ġretail ers", - "Ġcon ventions", - "ĠSam antha", - "Ġproud ly", - "J ordan", - "ĠJ ASON", - "at k", - "Ġtr iste", - "Ġst är", - "Ġreiter ate", - "Ġpos terior", - "Ġ197 3", - "ĠP ine", - "ĠJul iet", - "Ġped ir", - "k il", - "Ġover lapping", - "Ġexclud e", - "Ġecon óm", - "Ġaccept s", - "ĠS ter", - "æ± º", - "Ġìļ ´ëıĻ", - "est ab", - "Ġt ug", - "ar g", - "Ġliv ro", - "Ø§Ø µ", - "Ġse ams", - "Ġbur aya", - "Ġe llo", - "ĠT M", - "ĠP aw", - "ĠInd ex", - "Ex c", - "Ġinspir ational", - "Ġd unk", - "è° ģ", - "ak ter", - "Ġcondition er", - "ĠSal ut", - "ÅĤ ec", - "Ġìī ½", - "ĠÑĥз на", - "ĠRome o", - "f ruit", - "ĠY O", - "Ġchá» ī", - "б Ñĥ", - "b ons", - "Ġreprodu ctive", - "Ġor ada", - "Ġíļ ¨", - "Ġtent ar", - "Ġma ñana", - "ãĤ ¬", - "Ġsol vent", - "Jess ica", - "ĠLeg al", - "Ġtu a", - "Ġs ic", - "ĠE Q", - "au kee", - "ìĭľ ëĭ¤", - "ĠÅŀ u", - "Ġad here", - "ĠT ul", - "Ġà® Ĩ", - "Ġtext books", - "ĠFif th", - "Ġexper i", - "Ġch ic", - "Ġhe ap", - "in ely", - "at ra", - "T wo", - "Ġhele maal", - "Ġf ren", - "æİ ¨", - "Ġbis her", - "Ø§Ø ´", - "ĠìĦł ìĥĿ", - "ĠT ages", - "Ġs á»±", - "Ġbull ied", - "Ø ¤", - "Ġbenef ited", - "ĠPre viously", - "ĠÑį ÑĦÑĦ", - "Ù į", - "Ġsen ate", - "ĠM orm", - "ij ke", - "ĠF lu", - "Ġincorpor ating", - "j ack", - "Ġп иÑĤ", - "Ġimp ly", - "Ġha cks", - "ĠR ICH", - "Ġк ваÑĢ", - "ĠпÑĢек ÑĢаÑģ", - "Ġdepend ency", - "Ġìļ ©", - "Ġì± ħ", - "Ġwäh rend", - "Ġsu lla", - "ĠPitts burgh", - "Ġesemp io", - "¼ë ¡ľ", - "pr ot", - "ĠR osen", - "ĠIndepend ence", - "Ġpars ley", - "ie gen", - "Ġha w", - "Ġaqu ell", - "ĠC AP", - "ĠÑĢабоÑĤ аÑĤÑĮ", - "ĠCl iff", - "ion ar", - "Ġsec uring", - "æĪijåĢij çļĦ", - "ν ε", - "Ġutil is", - "Ġcou le", - "ĠP ing", - "Ġtre k", - "Ġf ak", - "Ġenorm e", - "Ġìĭ «", - "è® ©", - "Ġdoub ling", - "ĠнÑĢав иÑĤÑģÑı", - "Ġh ed", - "ho ven", - "ĠStand ing", - "Ġm ÃŃn", - "ĠJ imin", - "Ġmon arch", - "Ġco ke", - "Ġm r", - "Ġcl ic", - "à į", - "Ġimpe achment", - "Ġdur ability", - "Ġvar ios", - "Ġcommercial s", - "Ġgreet ings", - "ĠR i", - "ĠApp reci", - "ìŀĪ ëĬĶ", - "Ġrés ult", - "ér t", 
- "Ġsal ute", - "Ġpoder ia", - "Ġsun rise", - "ve ck", - "Ġreluct ant", - "Ġcommission er", - "å¿ µ", - "â te", - "ĠKen ny", - "ĠSir i", - "ãĥĥ ãĥĹ", - "ĠëĬ ĺ", - "ĠE E", - "Ġun ch", - "к он", - "ĠاÙĦØ ¥", - "Ġbel ts", - "Ġhas s", - "Ġмо Ñı", - "Ġdispl aced", - "Ġab ra", - "ÎŃ Î»", - "Ġscratch es", - "Ġcom et", - "Ġauthor ization", - "ĠL LC", - "Ġprodu k", - "Ġrehabil itation", - "å ŀ", - "Ñĸ Ñĩ", - "ud ing", - "ol it", - "Ġ10 5", - "Ġexp ands", - "Ġalt ri", - "ĠKom ment", - "Ġan f", - "P l", - "ĠM ana", - "f ed", - "Ġb ri", - "Ġor a", - "G s", - "ĠG ur", - "uck land", - "Ġjun ction", - "Ġiron ic", - "ĠFe ed", - "Ġpra kt", - "ĠHam mer", - "Įë ıĦ", - "ĠTr acy", - "çµ ±", - "ĠAs ide", - "н его", - "ĠиÑģполÑĮз оваÑĤÑĮ", - "Ġz aj", - "Ġequ itable", - "Ġcur b", - "Ġãģĵ ãĤĮ", - "Ġderiv atives", - "Ġpupp ies", - "ĠKenn eth", - "ĠCom pl", - "ig ram", - "ĠGar cia", - ") \"", - "ĠHar bor", - "est ial", - "Ġ ä¾Ĩ", - "Ġ ers", - "æ ¹", - "Ġunw anted", - "Ġbel ang", - "аР³Ð¾", - "em b", - "d os", - "ĠìĻ ľë", - "ĠBud get", - "Ġbatt ling", - "ØŃ Øª", - "k ok", - "наÑĩ ала", - "Ġpl ag", - "Ġcant idad", - "Ġgrup os", - "Ġplug ins", - "ler ini", - "Ġиме еÑĤ", - "Ġso zusagen", - "ol ics", - "Ġpue blo", - "Ġrem inis", - "r än", - "ĠMor rison", - "Ġl inha", - "Ġbreath s", - "ĠT aste", - "Ġenf rent", - "ĠDo cker", - "Ġд ен", - "Ġethnic ity", - "Ġw ob", - "Ġsuff ers", - "Ġtransition ing", - "ĠR ange", - "ÄĻd zy", - "Ġк аÑĤ", - "Ġsy ner", - "Ġdon ut", - "Ġprob abilities", - "ĠO mar", - "Wh ich", - "u ish", - "is in", - "Ġdem os", - "ĠìłĢ 기", - "Ġëĺij ê°Ļ", - "Ġед ин", - "Ġc erve", - "Ġj oka", - "I AN", - "Ġkilomet er", - "Ġhorizont ally", - "ĠBh ag", - "Ġ- >", - "ĠMon itor", - "Ġknowledge able", - "Ġf av", - "Ġpin ned", - "Ġe Bay", - "ick er", - "Ġìŀłê¹ IJë§Į", - "ĠXia omi", - "Ġcap it", - "Ġn p", - "Ġ196 5", - "ho e", - "Ġn ok", - "ĠS age", - "Ġн елÑĮзÑı", - "ĠT ow", - "g am", - "Ġdic en", - "ĠSUBSCRI BE", - "Ġrebo ot", - "Ġp aj", - "Ġë³´ìĹ ¬ë", - "Ġth icken", - "ĠRe ality", - "id än", - "N a", - "Ġê²ĥ ìĿĢ", - "!! 
)", - "Ġrout ines", - "Ġод ного", - "Ġex ting", - "Ġì¦ Ŀ", - "Ġsulf ur", - "Ġcar ve", - "Ġastero id", - "ĠWarri or", - "Ġphotograph ers", - "Ġpe ll", - "Ġcros sover", - "æĪij çŁ¥éģĵ", - "Ġhace mos", - "ĠNe j", - "Ġsett ling", - "Ġir m", - "ĠBook s", - "ient ôt", - "Ġesp acio", - "ĠSchol ars", - "Ġdo omed", - "ĠIR S", - "w ohl", - "Ġseg ue", - "ĠëĪĦ ê°Ģ", - "Ġpr atic", - "B T", - "ĠConsider ing", - "ĠBuff alo", - "Ġtrain ings", - "Ġge bru", - "ĠG leich", - "Ġpir ates", - "Ġen velop", - "Ġre open", - "im at", - "Ġte e", - "Ġsu ed", - "fe h", - "Ġ×Ķ× §", - "Ġdi ets", - "Ġjunt os", - "ast o", - "Ġmisunder stood", - "Ġru im", - "Ġclass ify", - "ĠпÑĢод Ñĥк", - "Ġin se", - "Ġillust rated", - "Ġcorros ion", - "Ġacc red", - "ĠAunt ie", - "ĠпÑĢив еÑĤ", - "ĠLI VE", - "Ġre k", - "Ġrece ipt", - "åĪ° åºķ", - "ĠBar bie", - "ĠSn ake", - "t urn", - "Je ff", - "ãģĬ ãģĬ", - "ķ Ħ", - "VO ICEOVER", - "co ll", - "Ġrun ners", - "ìł ľë", - "os os", - "mo on", - "Ġkey note", - "ĠInst it", - "S PEAK", - "Ġplug s", - "Ġcur v", - "ĠY uri", - "ĠTh eres", - "ĠP s", - "Ġμ ÏĢο", - "Ġconver ter", - "Ġref ine", - "Ġbad ass", - "Ġο ι", - "Ġreg en", - "az zi", - "ÙĬ Ùģ", - "Ġse ized", - "Ġiç er", - "ile e", - "Ġup stream", - "Ġbud s", - "Ġp im", - "Ġíķĺë £¨", - "Ġall uded", - "Ġthem ed", - "Ġconsist ing", - "Ġb ons", - "un uz", - "ĠпÑĢов од", - "ĠLove ly", - "ॠĭ", - "Ġpar ach", - "ĠSta ats", - "éļ Ĭ", - "Ġselect ive", - "Ġf ase", - "ĠGeor get", - "Ġcoc aine", - "Ġreprodu ction", - "ĠL ara", - "ĠL D", - "Ġg h", - "J on", - "Ġl Ã¥", - "Ġëij IJë", - "Ġtyp ed", - "ĠB ana", - "ë ĵľë", - "Ġsav ory", - "ĠZ omb", - "stand en", - "Ġpedest rian", - "Ġdifférent s", - "Ġìĭ ¸", - "èī ¯", - "Ġcompl ained", - "ç¦ ı", - "ĠÐļ ÑĤо", - "Ġ×ľ× ¤", - "ali ÅĽmy", - "Ġmort ar", - "Ġverd ict", - "Ġsu ficiente", - "ĠMill ion", - "mitt el", - "in als", - "ĠاÙĦØ ®", - "аÑİ ÑģÑĮ", - "Ġmi ÄĻdzy", - "ĠO le", - "Ġin vert", - "czy Äĩ", - "озм ожно", - "star ter", - "Ġaud itor", - "ĠSc out", - "ch ien", - "ĠSver ige", - "uff led", - "Ġze hn", - "ĠA uckland", - "Ġarg ent", - "Ġ197 6", - "ĠHo e", - "Ġboth ers", - "Ġsocial ist", - "Ġpl iers", - "Ġemer gen", - "ĠX P", - "еÑĢ ов", - "M ore", - "ĠLe vi", - "ĠAnd ers", - "ibil idad", - "ĠP arents", - "Ġindu ced", - "ìĸ´ì ¤", - "Ġbal ances", - "ĠвÑĭ ÑĪ", - "Ġsubmar ine", - "St art", - "Ġdri es", - "Ġvol ver", - "Ġtick ing", - "c ott", - "Ġf aj", - "pr és", - "ĠS abb", - "Ġза Ñĩ", - "Ġпок Ñĥп", - "Ġbapt ized", - "ĠBrill iant", - "ĠÐij ог", - "Ġm ots", - "b its", - "Ġlatt ice", - "æĪij è·Łä½ł", - "Ġcor iander", - "Ġresid ency", - "yn c", - "Ġpier wszy", - "ĠKn ock", - "ĠZ ap", - "ĠÐķ в", - "ê² ¬", - "å°ı å¿ĥ", - "Ġune ven", - "ĠJ as", - "od or", - "ç¿ Ĵ", - "7 4", - "ĠS ite", - "Ġacontece u", - "ym pt", - "Ġtril ogy", - "Ġlan tern", - "ĠZ ucker", - "v ari", - "we lling", - "ĠPot ato", - "gom ery", - "Ġreact ed", - "ĠChr on", - "Ġj ede", - "be eld", - "Ġtw ent", - "Ġl act", - "æ¨ Ĥ", - "Ġré se", - "Ġrel ent", - "Ġfurn ace", - "Ġwid get", - "Ġearthqu akes", - "ĠAd just", - "il it", - "ĠØ£ ÙĪ", - "Ġhear ings", - "Ġdefend ant", - "irs iniz", - "Ġbas k", - "c ja", - "ľ ¨", - "Ġrif les", - "Ġinst al", - "ĠFor give", - "p ical", - "ĠÐŀÑĩ енÑĮ", - "Ġpet ites", - "Ġh p", - "Ġren owned", - "ĠIn n", - "Ġ주 ìĦ¸ìļĶ", - "Ġemphas ized", - "éĹ® é¢ĺ", - "ĠìŀĪ ì£ł", - "Ġê²ĥ ìľ¼ë¡ľ", - "ãĤ Ĩ", - "Å ĵ", - "g ili", - "D ave", - "Ġexha usting", - "ÅĤ ug", - "Ġsch ema", - "μ ά", - "cy cl", - "Ġaut ant", - "Ġpar cel", - "Ġmater ia", - "ĠB erry", - "ĠÑģ ами", - "Ġextract ed", - "ĠSay ing", - "ism atic", - "Ġпоп ÑĢоб", - "Ġneur on", - "g raph", 
- "ľë ©´", - "Ġencl osure", - "ĠJoh ann", - "Ġafter math", - "ÑĤ об", - "Ġu ży", - "Ġs amp", - "3 60", - "ĠMe i", - "Ġt aco", - "Ġrecept ors", - "Ġpunch es", - "ĠHo je", - "ĠÙĩ ÙĨا", - "=\" #", - "ĠAng ular", - "Ġmus ique", - "Ġro l", - "Ġà ±", - "ster reich", - "Ġcl am", - "ĠTre asury", - "chem ical", - "Ġap ar", - "Ġapp end", - "Ġforb id", - "ĠHamb urg", - "ак ов", - "Ġê¸ Ī", - "ild a", - "Ġprepar ations", - "Ġmog Äħ", - "Ġcam ino", - "E ric", - "ĠBl ind", - "èĪ ĩ", - "å¹´ çļĦ", - "ĠDis covery", - "ì¸ ł", - "çĪ ¶", - "Ġinterpre ter", - "Ġb red", - "ĠPsal m", - "Ġdef ended", - "ìī ¬", - "ĠEr fahr", - "ĠPe ach", - "Ġmo ons", - "ĠO st", - "Ġspé cial", - "Ġarri ver", - "ĠW is", - "u ci", - "Ġrobot ics", - "I VE", - "Ġsie ge", - "ar la", - "Ġsepar ates", - "ĠT C", - "íı °", - "quis ite", - "Ġparenth eses", - "ик е", - "ç« Ļ", - "Ġtr ous", - "å» º", - "ĠÑģ илÑĮ", - "Ġbe ers", - "Ġпл аÑĤ", - "ãģĻãģĶ ãģĦ", - "Ġso la", - "Ġd ès", - "ming ham", - "ik te", - "Ġo ops", - "Ġtw itch", - "å° ĩ", - "Ï Ī", - "ĠShould n", - "uv re", - "Ġle er", - "cript ions", - "Ġeyes hadow", - "ĠGu o", - "ĠPow ell", - "Ġsup uesto", - "Ġan a", - "r als", - "ĠMont real", - "Ġsurf ing", - "ĠÐŁÐµÑĢ в", - "×ŀ ×ķ", - "Ġmillise conds", - "Ġsubur bs", - "Ġplanet a", - "ÑĥÑĪ ка", - "hr lich", - "ĠH Y", - "Ġس ÛĴ", - "ĠM M", - "ĠE ff", - "åı¯ æĦĽ", - "ĠH S", - "ans on", - "Ġì§ģ ìłij", - "Ġsu o", - "Ġdeploy ing", - "Ġk unt", - "ter ing", - "Ġere ct", - "ìŀ¥ ìĿ´", - "ĠìĿĮ ìĭĿ", - "Ġspec imen", - "! ...", - "æĪij 說", - "Ġlig ne", - "Ġk onst", - "ade qu", - "Ġìĥģ íĥľ", - "Ġaccess ed", - "ĠP ole", - "k ill", - "Ġë² Ħë", - "Ġauthentic ity", - "Ġapp elle", - "ull e", - "Ġrev ision", - "Ġgo ats", - "г ли", - "Ġp au", - "ĠR anger", - "ĠIm ag", - "aut hor", - "Ġe ve", - "ĠMess enger", - "Ġn ay", - "Ġwh oles", - "ät te", - "Ġon wards", - "ĠDep ois", - "Ġíijľ íĺĦ", - "ĠSAR S", - "Ġwszystk ich", - "Ġdest ru", - "umb ing", - "Ġcompat ibility", - "Ġmis information", - "od ore", - "ĠF avor", - "ek o", - "ı Į", - "w aukee", - "ĠTe aching", - "ĠK O", - "Ġbet ting", - "Ġquest s", - "Ġviv re", - "ĠмÑĥз Ñĭ", - "Ġs aga", - "Ġswe ll", - "Ġge he", - "æĢİ麼 樣", - "ĠоÑĢг аниз", - "Ġg ide", - "ĠG ross", - "Ġdale j", - "Ġcl aws", - "á»Ļ c", - "Ġprejud ice", - "Ġins ign", - "i hood", - "Ġpl ed", - "Ġdó nde", - "ĠPolit ical", - "Ġprem ises", - "und ert", - "ع ت", - "on nen", - "Ġespa ço", - "Ġf é", - "ĠHarr ison", - "ĠC ensus", - "Ġcard io", - "Ġdi y", - "Ġmil ieu", - "Ġjourn ée", - "ĠRe lease", - "N IE", - "ĠM uk", - "id ée", - "á»į i", - "Ġiç inde", - "ŀ Ļ", - "Ġreson ate", - "Ġm oles", - "ĠF lying", - "ĠGl oria", - "ĠPast or", - "ĠAre na", - "好 ä¸į好", - "N ON", - "ол ов", - "Ġall ÃŃ", - "om at", - "ìĸ´ë ıĦ", - "Ġcaracter ÃŃst", - "Ġdecl ining", - "Ñĸ Ñı", - "an co", - "ĠIn form", - "Ġbarg ain", - "Ġbus hes", - "ĠNat urally", - "Ġre chts", - "ĠT ensor", - "ĠPat ricia", - "Ġprincip io", - "ĠM umbai", - "Ġwom b", - "Ġnost ra", - "Ġdile mma", - "Ġirgendw ann", - "Ġ196 4", - "Ġenerg ÃŃa", - "Ġна ÑĢ", - "Ġseg regation", - "ĠA thlet", - "Ġ» ,", - "Ġy eni", - "ĠSe it", - "Ġven om", - "Ġdak ika", - "Ġëı Įë", - "ĠÃī l", - "Ġf us", - "ĠM og", - "¦½ ëĭĪëĭ¤", - "Ġrem ar", - "ĠTed dy", - "Ġbreast s", - "ic ans", - "æĶ¶ çľĭ", - "k ap", - "Ġh Æ¡n", - "ĠJ P", - "ãĥ³ ãĤ¿", - "Ġresur rect", - "ĠìĿ ¸ë", - "her ical", - "Ġfot ograf", - "ĠJos é", - "Ġlivel ihood", - "Ġbib li", - "ter i", - "Ġvor stellen", - "ĠA AA", - "Ġassess ing", - "Y A", - "Ġspl end", - "Ġexca v", - "Ġbapt ism", - "y ll", - "w ow", - "M ac", - "Ġpl astics", - "teok bokki", - "Ġintéress ant", - "Ġcommand ed", 
- "Ġfamous ly", - "ĠÐĺ ли", - "ĠMan uel", - "Ġsouth west", - "Ġde formation", - "ÃŃcul o", - "ĠнаÑħод иÑĤÑģÑı", - "ĠP atter", - "d egree", - "ĠczÄĻ sto", - "\" -", - "Ġìħ ĭ", - "Ġman ger", - "ĠTrust ee", - "Ģë ¦¬", - "Ġpunt os", - "iv able", - "Ġvol atile", - "ĠëĬ IJ", - "Ġinst ability", - "Ġc iel", - "ci Äħ", - "Ġpur ity", - "но ÑģÑĤ", - "S il", - "ed ar", - "åĻ ¨", - "NOUN CER", - "Ġspe lled", - "G ER", - "Ġsanct uary", - "Ġacceler ating", - "Ġsc out", - "ĠпÑĢ ев", - "f ahren", - "ãģĵ ãģ¡ãĤī", - "ĠëĤĺìĺ ¨", - "Ġpocz Äħt", - "ĠMe u", - "ka ar", - "³´ ê³ł", - "ak ra", - "D own", - "ĠÃĦ r", - "ĠEl ite", - "Ġall ons", - "Ġmay onnaise", - "ĠS ustain", - "prising ly", - "Ġsuper vis", - "Ġê·¸ëłĩ ì£ł", - "Ġunemploy ed", - "Ġfresh ly", - "Ġ×ŀ× ¢", - "ĠD h", - "Ġtack ling", - "Ġo gr", - "Ġì´ Īë", - "ãĤĪ ãĤį", - "Ġlo ft", - "ar ah", - "ĠA irl", - "ĠD ir", - "ĠÐľ ожно", - "Ġbook ing", - "ĠC RA", - "Ġhtt ps", - "Ġcho ke", - "Ġg own", - "Ġno ite", - "Ġz ac", - "ist ol", - "Ġsec re", - "Ġresemb les", - "Ġcu ad", - "ìĤ¬ ê°Ģ", - "sh ow", - "Ġbl anc", - "Ġag u", - "ĠPr int", - "ast ed", - "ĠWe ather", - "i pl", - "Ġobsc ure", - "Ġcont e", - "ough s", - ") ;", - "ĠD ame", - "ä¸Ģ 缴", - "Ġclar ification", - "Ġintim acy", - "Ġup hold", - "ĠMir ror", - "Ġw agon", - "x ide", - "Ġcl og", - "app er", - "ĠImmedi ately", - "ú de", - "Ġtouch down", - "Ġro oft", - "аÑĪ а", - "Ġç ıkt", - "Ġla isser", - "ĠUn real", - "ens itive", - "Ġ12 3", - "Ġpl aster", - "Ġduck s", - "Ġet me", - "Ġb ishop", - "bre vi", - "Ġb ic", - "ä¸ĭ åİ»", - "Ġrun time", - "Ġamb itions", - "м аÑĤ", - "ĠWe in", - "ĠMar i", - "ĠíĬ ¸ë", - "Ġresol ver", - "Ġng Ãły", - "ĠR ise", - "ãĤĪãģĨ ãģ«", - "ĠCr us", - "Ġmerchand ise", - "Ġel i", - "Ġstate wide", - "Ġow l", - "éģ ł", - "æĶ ¹", - "Ġtwist ing", - "Ġcontam inated", - "ĠCom merce", - "hy thm", - "Ġà Ī", - "Ġìĭ ¤ë", - "Ġmus ste", - "u ir", - "Ġsum s", - "ĠSome where", - "ãĥ İ", - "Ġk ami", - "Ġa ired", - "ĠAND REW", - "Ġê º", - "Ġv iendo", - "Ġantib ody", - "Ġabsol ument", - "Ġprotest ers", - "ĠQué bec", - "st adt", - "Sha un", - "Ġcham bers", - "ĠWe ar", - "ĠEffect s", - "Ġhaz ards", - "Ġne i", - "Ġcoraz ón", - "Ġá ¼", - "ĠS G", - "Ķ ©", - "ĠìĹŃ ìĭľ", - "Ġcom fy", - "ĠC ody", - "Ġpens ando", - "Ġg anska", - "ĠAc ross", - "öll ig", - "aby te", - "Ġwed ge", - "Ġkal ian", - "Ġsig ue", - "end es", - "ĠGro ÃŁ", - "Ġutil iser", - "Ġfl own", - "ани Ñİ", - "Ġle var", - "rest rial", - "Ġillust rations", - "Ġas lında", - "BLE EP", - "Ġдо ÑģÑĤ", - "Ġtur ret", - "Ġsuit case", - "ziÄĻ ki", - "Ġsket ches", - "Ġac red", - "ĠRe i", - "Ġt sun", - "ĠS ag", - "Ġthird s", - "ĠKIR BY", - "ra i", - "Ġhuman os", - "Ġrecomm ends", - "Ġextraordin arily", - "Ġcommence ment", - "K N", - "ope z", - "Ġ×ij× ©", - "Ġlet hal", - "ĠEst amos", - "Ġinspect or", - "ĠSe ok", - "e un", - "Ġoff shore", - "Ġget tin", - "ye ars", - "ĠSil ence", - "ĠNat ur", - "up un", - "Ġtr zy", - "Ġno get", - "Ġhamb urger", - "ĠPra ise", - "é nd", - "Ġ197 1", - "yl ie", - "k rit", - "ĠìĥĿê°ģ ìĿ´", - "çļ ®", - "Ġmoment os", - "Ġest é", - "Ġdisse min", - "Ġgig s", - "Ġdes af", - "Ġav is", - "ĠZ oo", - "ĠìķĬ ìĿĢ", - "h äng", - "åı ¥", - "h ake", - "ĠB ism", - "Ġre think", - "ĠMal colm", - "Ġident ifies", - "l ower", - "ix el", - "Ġtv Ã¥", - "k ed", - "ier z", - "Ġö ffentlich", - "Ġproc laim", - "so on", - "l ol", - "Ġlo i", - "Ġb itten", - "ro llo", - "Ġser mon", - "Ġes qu", - "Ġjack ets", - "Ġgr áfic", - "Ġпок азÑĭв", - "Ġcabe za", - "ch odzi", - "Ġpel vis", - "Ġnost algia", - "Ġbre w", - "Ġshort cuts", - "ĠAd emás", - "Ġsuperfic ial", - "åħ© åĢĭ", - "Ġbo 
ca", - "ĠæĪij æĺ¯", - "iment os", - "åĽł 为", - "Ġspr outs", - "é£ Ľ", - "ĠJon as", - "ĠFloren ce", - "st atic", - "da ughter", - "* )", - "ÅĤ by", - "f ashion", - "ĠG inger", - "Ġë§ ¤ë", - "Ġhust le", - "ut os", - "ĠÑĤ Ñıж", - "ĠL ös", - "ש ×Ļ×Ŀ", - "any ch", - "tu ber", - "Ġtid y", - "Ġfront al", - "Ġwhis key", - "Ġhum id", - "ĠÎ Ł", - "Ġr idge", - "Ġmar in", - "Ġb ientôt", - "ĠCarr ie", - "ch w", - "Ġtah un", - "ĠEr geb", - "F R", - "Ġìłķ ë¶Ģ", - "ĠSold ier", - "Ġenlight enment", - "Ġexam ining", - "ĠNot re", - "Ġer am", - "ĠSun ny", - "Ġlay ered", - "ĠD azu", - "r ades", - "好 åIJĥ", - "ĠнаÑĪ ей", - "Ġtim ber", - "Ġman ners", - "ĠBir mingham", - "Ġmini ature", - "omet ers", - "Ġfill er", - "ĠR ip", - "ĠK omb", - "own er", - "ì ¿", - "id ian", - "Ġdem ás", - "ĠÙĪ ت", - "Ġpreca utions", - "Ġgovern o", - "z elf", - "ĠCom plete", - "å¸ ĥ", - "ĠPh antom", - "ãģ¾ ãģļ", - "Ġн ез", - "ĠкаÑĢ ÑĤ", - "ĠAnt wort", - "ĠPf izer", - "ĠFran co", - "Ġw ÅĤ", - "Ġfr ig", - "es per", - "Ġk ale", - "Ġfilm maker", - "Ġk urt", - "Ġinv alid", - "å± Ģ", - "are lla", - "Äĥ ng", - "ram ento", - "Ġnutr itional", - "Ġdict ators", - "Ġaf in", - "Ġf uzzy", - "ĠG ina", - "ó t", - "ĠExtrem adura", - "Ġdemonst rations", - "ĠMont gomery", - "íķ´ì Ħ¤", - "ĠGand hi", - "ãĥ Ŀ", - "ç½ ®", - "Ġreun ion", - "Ġjaki ÅĽ", - "ĠZ ug", - "OU GH", - "l ifting", - "Ġ à²", - "á¹Ľ á¹£", - "e b", - "ĠW OW", - "ĠSh iva", - "omet ry", - "Ġwild ly", - "Ġt ended", - "Ġmeg ap", - "ì² ĺ", - "Ġna use", - "Ġg erek", - "ãĥ ĭ", - "ĠMar cel", - "Ġn este", - "Ø® ر", - "Ġfe h", - "åĨ ħ", - "susp enseful", - "ĠWrest le", - "ĠPalestin ians", - "ĠG ORD", - "iy et", - "ĠÑĢ ади", - "Ġvers uchen", - "Ġtrans istor", - "ĠÐŁÑĢ оÑģÑĤо", - "Ġпон ÑĢав", - "Ġrhy me", - "ĠVerm ont", - "pl atz", - "è® °", - "ĠÄ°ÅŁ te", - "ĠH ag", - "ĠÐĺ м", - "ĠÑĢаÑģÑģк аз", - "Ġmet ros", - "ĠInfin ity", - "w olf", - "ib al", - "ft ig", - "Ġ ÚĨ", - "Ġíĺ¹ ìĭľ", - "Ġo ggi", - "Ġdisp osit", - "ĠпÑĢ ил", - "ĠвÑĭ пол", - "Ġth ôi", - "ĠK ENN", - "Ġhand ing", - "act us", - "Ġtac os", - "Ġformer ly", - "ĠCorinth ians", - "ãģ« ãģ¯", - "ÑĨÑĸ ÑĹ", - "Ġpad re", - "Ġcongreg ation", - "æ ij", - "fer t", - "Ġsub ir", - "ais er", - "qu a", - "ara oh", - "ĠCur ry", - "ĠìķĬ ëĬĶ", - "ел Ñİ", - "Ġf uss", - "Ġbo oty", - "Ġl ows", - "Ġh ommes", - "ĠM H", - "ĠDisney land", - "w ent", - "Ġresid ue", - "Ġbe eping", - "è¼ ķ", - "ät ta", - "Ġm ould", - "ĠPro jekt", - "st alk", - "Ġartif act", - "ĠAnt rag", - "ĠAM D", - "ĠCry pt", - "Ġë© Ķ", - "ĠFel ipe", - "ĠCO B", - "el u", - "Ġself ies", - "ĠS anti", - "ch utz", - "ĠУ кÑĢаÑĹ", - "ges amt", - "Ġflo ck", - "j az", - "pl ain", - "Ġwr inkles", - "Ġre ais", - "Ġpal jon", - "Ġempower ment", - "Ġattend ees", - "pp a", - "Ġn eden", - "он Ñĭ", - "Ġtime frame", - "ĠCher ry", - "Ġid ée", - "Ġg ag", - "Ġdon key", - "Ġô ng", - "ĠH are", - "éļ Ľ", - "ĠK ara", - "Ġacom pan", - "pl aces", - "im ientos", - "ĠH amm", - "б и", - "ub en", - "ili yor", - "Ġth irst", - "Ġk ry", - "ĠGeorget own", - "׳ ×Ķ", - "Ġor ch", - "Ġheart beat", - "Ġtransform ations", - "est ones", - "ĠK H", - "Ġcart oons", - "Ġan ci", - "Ġworth less", - "Ġtail ored", - "p u", - "Americ ans", - "Ġp iles", - "ĠMon key", - "Ġbas in", - "ĠTem per", - "ĠP aint", - "Ġpunch ing", - "Ġba ik", - "ĠOak land", - "v re", - "ÅŁ allah", - "yd d", - "Ġcas ually", - "od u", - "Ġc oded", - "ĠNorweg ian", - "ĠV ince", - "Ġprem ature", - "ĠProm ise", - "ек ÑģÑĤ", - "Ġdevast ated", - "ĠPrem ium", - "ĠPar am", - "ĠÃĸ yle", - "um uz", - "P O", - "r ators", - "Ġlamp s", - "Ġterritor ial", - "Ġback bone", - "list ed", - "D Y", - 
"ĠاÙĦ ر", - "Ġpurs ued", - "ĠComm ons", - "Ġê³ ¡", - "lo cks", - "ed or", - "Ġconce ived", - "g ere", - "Ġdisappe aring", - "ĠS ull", - "ĠìĹ °ë", - "Ġho ffe", - "Ġdet ox", - "íĶ Į", - "Ġret ir", - "ĠëģĿ ëĤ", - "Ġper gunta", - "ĠB OY", - "ç² ¾", - "Ġp enn", - "æĿ¥ äºĨ", - "h és", - "h on", - "Ġcatastroph ic", - "Ġa ust", - "Ġtor so", - "Ġìĸ´ ëĬIJ", - "ĠìĤ¬ëŀĮë ĵ¤ìĿ´", - "Ġmarvel ous", - "ĠHar ley", - "ach ine", - "Ġti ế", - "itt o", - "ĠI ÃŃm", - "yl on", - "Ġshut down", - ".' '", - "Ġap ologies", - "ĠCommun ication", - "ĠговоÑĢ Ñİ", - "ãģĤ ãĥ¼", - "âĦ ¢", - "ÃŃ veis", - "ac un", - "Ġret aining", - "Ġcontrad iction", - "ĠAD AM", - "C OM", - "Bry an", - "ĠM onsieur", - "Ġadap ting", - "Ш ÐIJ", - "ĠSc r", - "änd ert", - "Ġpl aus", - "ä»Ĭ天 çļĦ", - "Ġon set", - "Ġassist ants", - "Ġval ves", - "Ġsc atter", - "ĠR ust", - "aw ia", - "Ġread iness", - "Ġp ais", - "Ġb ible", - "Ġamb iente", - "Ġа меÑĢик", - "Ġunc ond", - "Ġk alk", - "åĬ ¨", - "Ġmo c", - "un n", - "Ġact u", - "Ġhum ming", - "iss imo", - "ĠPat rol", - "g ow", - "ãĥ ¤", - "ĠTHE Y", - "ĠBod en", - "ĠB ie", - "Ġre el", - "ĠÑĥÑģл ов", - "Ġende avor", - "ĠPer iod", - "ustom ed", - "m als", - "al on", - "B ox", - "ĠÏĥ αÏĤ", - "Ġom dat", - "Ġal tre", - "ĠHe h", - "k ad", - "Ġprotect or", - "Ġdomin ance", - "odynam ic", - "Ġcommunic ated", - "k ö", - "Ġprede cessor", - "ĠL uk", - "ĠFl ower", - "Ġãģ ©", - "po que", - "ÑĤи ÑĢов", - "Ġret rospect", - "Ġdecis ive", - "Ġexem pel", - "{ \\", - "ĠR ück", - "r ite", - "ĠZe us", - "Ġcal orie", - "Ġattract ions", - "ĠH inter", - "Ġuh m", - "ĠíĮ IJ", - "Ġrul ers", - "Ġdiscour aged", - "Ġaconte cer", - "Ġacc ents", - "ĠOpt im", - "ĠAl g", - "k ids", - "20 21", - "ĠLind say", - "Ġfilm makers", - "pr owad", - "Ġter ug", - "ëĭ ´", - "ĠSom mer", - "20 18", - "Ġborrow ing", - "ĠTrans fer", - "н оп", - "ari as", - "Ġhead phone", - "ì¼ ľ", - "Ġtransl ating", - "Ġauf ge", - "ப à®Ł", - "we is", - "av ant", - "pa id", - "b aby", - "Ġtough est", - "Ġrepe ats", - "ĠTer esa", - "L ord", - "Ġacab ar", - "ĠR ide", - "d ir", - "Ġl eng", - "Ġd wa", - "Ġhead aches", - "Ġn ữa", - "ĠнаÑģ ÑĤоÑıÑī", - "Ġbo ils", - "Ġlong ing", - "ri as", - "ó rio", - "ĠParad ise", - "ĠSeñ or", - "erd em", - "Ġrein st", - "Ġsal aries", - "Ġinsec urity", - "ÅĤo ÅĽci", - "ĠабÑģолÑİÑĤ но", - "ink en", - "ĠEd dy", - "ud os", - "Ġd ummy", - "Ðļ ак", - "s ix", - "Ġin box", - "Ạ©", - "Pe ople", - "á»ĵ ng", - "Ġorganiz ers", - "f ind", - "Ġü l", - "ĠCO M", - "ż a", - "we ile", - "Comment ary", - "íĬ¸ë ¥¼", - "ĠMitt el", - "k us", - "èĽ ĭ", - "ठ¨", - "ir al", - "Ġgar ment", - "ικ ά", - "Ġst ool", - "pay ers", - "Ġsh immer", - "ĠO llie", - "ĠJe żeli", - "è¿ĺ æľī", - "Ġ197 7", - "Ġje ux", - "Ġext inct", - "ĠTransport ation", - "ĠM aker", - "Ġj ohn", - "Ġrich est", - "Ġtraum at", - "Ġli egen", - "´ë ¥¼", - "è¿Ļ éĩĮ", - "Ġun rest", - "ĠSt raw", - "æĭľ æĭľ", - "Ġcom a", - "ĠKr isten", - "ĠÐļон еÑĩно", - "ĠBry ce", - "ĠÑıк Ñĸ", - "Ġpearl s", - "Ġпоним аÑİ", - "Ġadd itions", - "Ġas ympt", - "ĠменÑĮ ÑĪе", - "Ġsc ans", - "Ch ild", - "ĠH ide", - "к ÑĥÑİ", - "et as", - "Ġd ank", - "Ġple as", - "Ġess ays", - "Ġj ets", - "åħ Ĵ", - "Ġв ед", - "Ġposit ives", - "ho f", - "- )", - "zz o", - "Ġstar ters", - "Ġsm iled", - "Ġ194 4", - "qu iera", - "Ġro k", - "Ġpu esto", - "N ico", - "Ġsim ulations", - "Ġ à¶", - "Ġintrig ued", - "ĠOver watch", - "åĸ Ĥ", - "s igh", - "b ai", - "Ġë§IJ ê³ł", - "id é", - "Ġcra bs", - "áºŃ p", - "ĠIraq i", - "ìĿ´ë ¥¼", - "ÑĤ Ñı", - "ĠSoph ia", - "ĠDN S", - "Ġönem li", - "ĠLu o", - "Ŀ ¤", - "ĠCoun sel", - "l igen", - "анÑĮ ÑĪе", - "Ġtrump et", - 
"Ġd apat", - "ĠJ M", - "ĠEVER Y", - "Ġå°į ä¸įå°į", - "å¤ ¢", - "ĠL ayer", - "Ġc ô", - "н ал", - "ĠJ oo", - "ĠH ack", - "Ġs unt", - "ĠLeon ard", - "ĠFire base", - "äng er", - "Ġexpl oding", - "v oy", - "Ġì¦ IJ", - "ĠÑģ еÑĢÑĮ", - "Ġsever ity", - "Ġbest imm", - "çµIJ æŀľ", - "Ġt iring", - "Ġprocure ment", - "Ġdiplom acy", - "Ġdecor ative", - "ĠÙĬ ا", - "Ġpenet ration", - "Õ «", - "Ġout right", - "EN E", - "ĠUn i", - "od les", - "Ġz eros", - "Ġdelight ful", - "j m", - "Ġdo po", - "没 äºĭ", - "Ġposit ivity", - "ĠVIS TA", - "ĠRes ource", - "íĥ Ģë", - "ÑĪ ие", - "C arl", - "Ġpip ing", - "Ġchop ping", - "ĠGan ze", - "ü ss", - "ĠA o", - "Ġsh attered", - "ĠDet ective", - "Ġund oubtedly", - "Ġhall uc", - "Ġen ch", - "Ñĭ Ñĩно", - "ÑĥлÑı ÑĢ", - "is esti", - "Ġped als", - "Ġdur um", - "¤í Ķ", - "la imer", - "Ġprop re", - "C u", - "Ġtransl ator", - "Ġca ÅĤ", - "Ġê·¸ 걸", - "Ġca ÅĤy", - "U A", - "Ġrev ised", - "Ġпод об", - "ĠArt icle", - "ĠHait i", - "Ġà ĵ", - "ĠC trl", - "Ġroz m", - "la it", - "Ġletz te", - "is pering", - "dis play", - "Ġalumin ium", - "Ġpalab ras", - "Ġconoc er", - "Ġz itten", - "Ġdir ig", - "åıª æľī", - "Ġbrain storm", - "Ġw ifi", - "ĠPart icip", - "Ġview point", - "ĠQu an", - "Ġhier arch", - "W elcome", - "å¯ ¾", - "Ġoff en", - "ĠRe covery", - "gan o", - "W ould", - "Ġrep ro", - "Ġper ceptions", - "Ġdem asi", - "ĠBangl adesh", - "ĠIncred ible", - "Ġlet zt", - "Ġbehav ing", - "Ġaston ishing", - "Ġâ Ĩ", - "ĠëĤ¨ ìŀIJ", - "èµ° äºĨ", - "ãĥ Ķ", - "ĠGORD ON", - "C AR", - "? !\"", - "ĠP rest", - "Ġë§ŀ ìķĦìļĶ", - "Ġt and", - "Ġl ash", - "ç Ĭ", - "ific ant", - "Ġint oler", - "Ġг еÑĢо", - "Ġte u", - "as o", - "ĠÑģов еÑĤ", - "Ġtravel ers", - "ĠSy nd", - "ĠвеÑĢ Ñģ", - "F onda", - "ad ı", - "Ġtrans cription", - "Ġtit anium", - "Ġtw ists", - "Ġgear box", - "ens ation", - "f at", - "C oll", - "ĠCommon wealth", - "z on", - "ĠPolize i", - "ĠAPP LAUSE", - "f ry", - "ĠJud a", - "este em", - "Ġso ck", - "ĠJug end", - "Ġк ÑģÑĤаÑĤи", - "ĠD ro", - "Ġproch aine", - "ãĥ¼ ãĥ«", - "Ġli ksom", - "ĠEner gie", - "ĠMar ina", - "Ġ2 30", - "Ġê°Ģ ìĦľ", - "ump ing", - "Ġl one", - "ç´ ļ", - "Ġfont s", - "Ġbusiness man", - "Ġp ly", - "Ġdo e", - "gr id", - "ĠMil waukee", - "ĠE den", - "! 
\".", - "ĠÛĮ Ûģ", - "og ens", - "Ġteas er", - "Ġqui én", - "Ġincent iv", - "go vern", - "Ġchild care", - "Ġsneak ers", - "Ġimprison ed", - " ®", - "иÑĤ еÑģÑĮ", - "an bul", - "Ġreg ain", - "Ġtranqu il", - "Red ner", - "éĽ ¨", - "IF A", - "Ġide ological", - "Ġmayor ÃŃa", - "Ġb ureau", - "et erm", - "ĠD ID", - "ìĬ ·", - "Ġw aving", - "Ġbe b", - "Ġá r", - "Ġк в", - "Ġenv oy", - "an ut", - "ик Ñĥ", - "ĠEnviron ment", - "ĠAss ass", - "ãĤĵ ãģ§", - "ĠB read", - "ĠТ ÑĥÑĤ", - "Ġstair case", - "ĠDise ase", - "Ġauc un", - "Ġëĭ Ī", - "Ġconfront ation", - "Ġ194 1", - "Ġiron y", - "Ġwor sh", - "ãĤĮ ãĤĭ", - "Ġf ick", - "ĠNa omi", - "Ġback side", - "ie ux", - "K ap", - "Ġved ere", - "Ġlength y", - "Ġbreak er", - "ĠRoll e", - "Ġpred ator", - "Ġnoss os", - "Ġadvert ise", - "è³ ĩ", - "ÑĢод е", - "Redner wechsel", - "re ten", - "Ġcollect ors", - "ıģ ımız", - "Ġtr ig", - "Ġax es", - "in ters", - "Ġpen alties", - "ĠOs man", - "ĠJen na", - "Ġfl akes", - "Ġtrain ers", - "Ġstun ned", - "ĠSc roll", - "ĠP ip", - "Ġна ÑģÑĤ", - "Ġnh Ãł", - "ĠSm ack", - "ẫ n", - "rat os", - "ĠÑĢабоÑĤ Ñĭ", - "Ġu cz", - "ĠLem on", - "ĠS ind", - "Ġpsych ic", - "ĠAb g", - "Ġmamm als", - "Ġimmers ive", - "Ġb ots", - "Ġverschied ene", - "Ġg eral", - "Ġfoll ower", - "Ġ ä»ĸ", - "Ġsegur idad", - "Ġimmers ed", - "fe ito", - "c ross", - "Ġö ld", - "íĥ Ħ", - "Ġãģĵ ãģ®", - "Ġ×Ķ ×Ļ×IJ", - "ĠJ ian", - "Ġbili yor", - "are a", - "Ġk af", - "Ġgod t", - "缸 ä¿¡", - "Ġë°© ìĨ¡", - "Ġdet riment", - "æ¥ ļ", - "Ñĸ л", - "ĠÄij âu", - "Ġchlor ide", - "ø re", - "le i", - "Ġmont e", - "Ġdifférent es", - "à¯ģ .", - "Ġcareg ivers", - "Ġin adequ", - "Ġfare well", - "ĠÑĤип а", - "ont ec", - "ĠE ph", - "HH H", - "ĠTod os", - "ĠС ШÐIJ", - "Ġtro v", - "Ġl ige", - "Ġc ông", - "ĠC iv", - "Ġcap az", - "ĠV allahi", - "Ġquest e", - "Ġrepl ica", - "س ب", - "z na", - "ĠÑģл Ñĥж", - "ĠP T", - "w ave", - "ien i", - "Ġrel ied", - "de velop", - "Ġdem e", - "ĠA man", - "Ġ[ ...]", - "Ġcompl iments", - "u ais", - "ĠíĮ ¨", - "Ġsmell ing", - "Ġdad urch", - "ÙĪ ت", - "Ġor anges", - "Ġл ай", - "Ġstabil ization", - "åĢ į", - "ãĤĮ ãģŁ", - "æ¥ ½", - "Ġappl iances", - "Ġh m", - "ĥ IJë©´", - "odynam ics", - "Ġc iÄĻ", - "ĠC ott", - "M ON", - "ĠM ang", - "æĶ¯ æĮģ", - "Ġall erdings", - "ικ ή", - "sh ots", - "Ġt s", - "ĠG ör", - "ĠCH AR", - "Ġ: (", - "Ġwr ath", - "Ġf ique", - "Ġfüh ren", - "Ġtest ament", - "Ġ^ ^", - "á¹Ľá¹£ á¹ĩa", - "AL D", - "Ġtext o", - "ĠDog s", - "Ġs ib", - "Ġpath etic", - "ock s", - "Ġrad ically", - "ĠM ORE", - "ĠJAM ES", - "Ġing l", - "ĠTechn ical", - "Ġpor ch", - "ĠU T", - "ĠобÑıз аÑĤелÑĮно", - "Ġrenew al", - "Ġaesthet ics", - "ik um", - "Ġbe verage", - "der n", - "Ġpredict ive", - "Ġch uy", - "ĠRegard ing", - "ĠFor ward", - "ĠÙĪ ÙĦ", - "Ġcontext ual", - "Ġdwar f", - "Ġpre he", - "Ġgovern ed", - "ħ Ħ", - "Ġtrabal har", - "Ġnegó cio", - "ĠболÑĮÑĪ ой", - "еÑĩ аÑĤ", - "Ġд ÑĥÑħ", - "Ġflood s", - "Ġbow ling", - "ĠO B", - "ĠH är", - "Ġgrad ing", - "주 ëĬĶ", - "Ġg ars", - "d ling", - "Ġr ak", - "ë Ī", - "c reat", - "ĠÑī е", - "Ġneighb ours", - "f ood", - "Qu ery", - "Ġhero in", - "ice ps", - "ĠK inda", - "N ET", - "Ġmar i", - "Ġim itate", - "Ġach ter", - "Ġsettle ments", - "ra re", - "cc iones", - "Ġë ĵľ", - "Ġf ik", - "it ung", - "Ġм акÑģим", - "Ġel f", - "Ġd alla", - "ĠPol sce", - "ĠP ul", - "Ч ÑĤо", - "ĠMor gen", - "ØŃ Ùħ", - "Ġsuprem acy", - "Ġk ys", - "ĠHur ricane", - "ĠG TA", - "ĠFe h", - "Ġfinal mente", - "m und", - "ĠK rie", - "é poque", - "ĠT ucker", - "IT T", - "Ġl ur", - "Ġdi pping", - "ä v", - "Ġeer ste", - "ĠFl int", - "bild ung", - "ู à¹ī", - "Ġto im", - "Ġpr acy", - 
"Ġtransform s", - "Ġspeed ing", - "Ġpresent er", - "Ġfellow s", - "f illed", - "ie za", - "Ġadv ising", - "ĠInter view", - "и гÑĢ", - "we hr", - "ĠD ante", - "pt ure", - "Īë¬ ¸", - "¯ ¸ë", - "IJ IJ", - "ĠCoun ter", - "Ġcr ist", - "Ġì§ ľ", - "Ġje une", - "ĠÑģÑĤ ÑĢаÑĪ", - "Ġmie Äĩ", - "Ġtut or", - "Ġmas ala", - "Ġpowder ed", - "Ġn au", - "ĠFreder ick", - "Ġbill ing", - "ĠE isen", - "Ġд обÑĢ", - "Ġm est", - "æ ½", - "Ġsn ipp", - "Ġmon o", - "ĠA lo", - "ĠMer cy", - "éri ence", - "Ġcasual ties", - "ĠAN NOUNCER", - "ä» İ", - "Ġto car", - "Ġbacter ial", - "H o", - "Ġstre ak", - "ĠJ ENN", - "Ġpl ast", - "Ñģ лед", - "Ġre app", - "Ġpay check", - "Ġmin ers", - "hab t", - "ĠJ ap", - "н ÑĥÑĤ", - "Ġred emption", - "Ġqu ir", - "hn lich", - "Ġaccum ulation", - "Ġsh ove", - "Ġadrenal ine", - "M ake", - "ĠH ern", - "oss ing", - "ĠV il", - "ub by", - "her tz", - "bre aks", - "Ġsp ur", - "ĠD aha", - "US TIN", - "Ġcontinu er", - "ĠSa ul", - "ãģ® ãģ¯", - "Ġíı Ń", - "ĠëIJĺë ©´", - "Ġë§IJìĶ Ģ", - "Ġо ж", - "Ġsuspect s", - "Ġla quelle", - "ĠMuch as", - "Ġv öllig", - "ul en", - "Ġimp res", - "Ġlo bb", - "ene e", - "Ġн аж", - "T a", - "Ġréal ité", - "ĠRe x", - "Ġharvest ing", - "Ġest r", - "æ ¶", - "osp ace", - "OS S", - "Ġdisturb ance", - "ass ic", - "ĠIs ab", - "Ġdéc ouv", - "ĠHamp shire", - "Ġor nament", - "Ġlu ôn", - "ĠU W", - "Ġj Äħ", - "éĤ£ ä¹Ī", - "Ġrespect o", - "Ġcomun idad", - "Ġcom igo", - "ag na", - "Ġintrins ic", - "ĠAlum ni", - "Ġses leri", - "Ġestim ation", - "âĢĶ âĢĶ", - "Ġprodu it", - "ãĢĤ ãĢį", - "Ġв ÑĢ", - "Ġwh irl", - "Ġac ces", - "ç u", - "Ġvari ability", - "Ġv odka", - "its u", - "Ġinternship s", - "Ġalloc ate", - "R R", - "íĽ Ī", - "Ġinstruction al", - "t ant", - "Ġà®ħ த", - "Ġinv ites", - "Ġha k", - "Ġsca res", - "Ġe clipse", - "п ов", - "к олÑĮ", - "ativ as", - "Ġstab bed", - "ĠD OM", - "ä¸į åĪ°", - "ro ots", - "ĠPict ure", - "íĺ ¼", - "ĠC HA", - "ie c", - "ı ı", - "han ol", - "Ġmisunder stand", - "R ay", - "Ġroad map", - "ocument ed", - "iz ione", - "ĠOl ive", - "r ift", - "Ġ×Ķ× ł", - "æ¯ į", - "l est", - "; ;", - "ĠE A", - "éľĢ è¦ģ", - "од Ñĥ", - "Ġhob bies", - "Ġbur ial", - "ãģ« ãģ¡ãģ¯", - "Ð ¤", - "le ge", - "ĠH J", - "Ġobject ion", - "Ġãģ Ń", - "ct ory", - "Ġincre mental", - "Ġgym n", - "Ġepid emi", - "Ñģ Ñĭл", - "à ij", - "Ġadvance ment", - "Ġpar ch", - "New s", - "Ġa yr", - "л ам", - "Ġ×ľ× ©", - "Ġdipl oma", - "ãģ¡ãĤĥ ãĤĵ", - "Ġrob bed", - "On ly", - "Ġinc ur", - "Ġch anting", - "Ġíķ´ë ıĦ", - "Ġrich es", - "ĠCar men", - "Ġnost ro", - "λ ÎŃ", - "ĠPow der", - "à¹Ģภ«", - "ĠìŀĪ ìľ¼ë©´", - "Ġgerçek ten", - "ĠPik achu", - "ем он", - "OL L", - "Ġplanet ary", - "Ġsl ows", - "Ġclock wise", - "al ion", - "Ġì Į", - "Ġver n", - "Ġh omme", - "Ġend point", - "Ġinnoc ence", - "Ġelement os", - "Ġsophom ore", - "Ġnot ions", - "ĠCould n", - "p ur", - "Ġz at", - "Ġobs ess", - "Ġmotiv o", - "ĠK ub", - "ĠDr ug", - "A nt", - "ĠPlay ers", - "ĠHum ans", - "Ġme lee", - "ĠWild life", - "ĠV P", - "Ġvolcan ic", - "Ġcom in", - "ĠGu ang", - "ĠÏĦι ÏĤ", - "ĠоÑģоб енно", - "ĠS ize", - "L isten", - "ĠA aa", - "app ro", - "Ġbar bar", - "ĠPark inson", - "нÑı ÑĤÑĮ", - "å į°", - "Ġunderest imate", - "Ġsubst itution", - "Ġcosm etic", - "ä¸ĭ 次", - "Ġwill en", - "Ġbe ide", - "ann i", - "Ġcondition ed", - "ĠDe bbie", - "Ġis to", - "ĠEd wards", - "ìĽĮ ìļĶ", - "ĠÑĤ ов", - "Ġab brevi", - "ĠM ün", - "ĠPr inc", - "ĠLi ang", - "Ġst ink", - "Ġradio active", - "ãģĨ ãĤı", - "Ġac ontec", - "Ġun con", - "ĠTur bo", - "ãģ IJ", - "Ġkiss es", - "æĺ¯ ä»Ģ麼", - "еÑĤ ÑĢов", - "Ġfront ier", - "ĠSp y", - "ĠBel arus", - "ĠC BS", - "á» Ĺ", - "am 
oto", - "íķľë į°", - "ĠÑģÑĤ ÑĢо", - "ĠEn fin", - "Ġbread th", - "éĺ ²", - "ĠCa fe", - "ĠDaf ür", - "ĠB our", - "ar as", - "Ġbl ueprint", - "an ı", - "Ġconst ants", - "Ġattack er", - "ĠForm ula", - "za Äĩ", - "Ġs owie", - "Ġeyebr ow", - "ob ook", - "Ġset zen", - "第 ä¸ī", - "ons ider", - "aw ning", - "Ġsöyle ye", - "Ġinv aded", - "Ġpronoun s", - "Ġdob ry", - "S i", - "ĠÐ¥ оÑĤ", - "Ġvolley ball", - "Ġl ament", - "is ches", - "ar me", - "ap i", - "ĠW iki", - "ли ÑĪ", - "Ġkas ih", - "Ġp ess", - "ĠÑĦ оÑĤ", - "ĠS ul", - "å¾ ·", - "Ġpse udo", - "Ġmem o", - "ĠìĹ° ìĬµ", - "ĠдоллаÑĢ ов", - "ĠпеÑĢ ем", - "ĠRe ach", - "mir al", - "alt ed", - "Ġstat ut", - "read ing", - "Ġsöy led", - "ĠLind sey", - "ĠAh mad", - "ë ¶Ģë", - "ĠС егоднÑı", - "Ġprzy got", - "Ġhy ster", - "U RE", - "ĠNe igh", - "Rep orter", - "ĠB unu", - "ĠTreat y", - "ĠR ank", - "ĠF ame", - "in ished", - "Ġge ared", - "Ġcomp ose", - "od ia", - "ĠL on", - "Ġjeste ÅĽmy", - "ĠDIRE CTOR", - "Ġel kaar", - "ĠV iel", - "×IJ× ©", - "ynth ia", - "ä¸ ¦", - "Ġm ère", - "ĠTom ato", - "Ġex atamente", - "ni ÄĻ", - "ĠFre i", - "ĠD if", - "Ġopen ings", - "Ġgraph ical", - "ĠÑĥд об", - "ĠвÑģ п", - "ĠWeek ly", - "ев а", - "Ġhang s", - "Ġuns afe", - "Ġem blem", - "ĠKolleg innen", - "al ay", - "Ġk si", - "Ġh ides", - "Ġol may", - "Ġent ste", - "Ġarth ritis", - "ÃŁ erdem", - "Ġbin nen", - "Ġlist ens", - "ĠH ess", - "åĨį ä¾Ĩ", - "ĠLou ise", - "ld en", - "ен Ñģ", - "ĠVers ion", - "ĠAgric ulture", - "ìĬ¤ë ¥¼", - "м ан", - "ë Ħ¤ìļĶ", - "Ġw ines", - "ĠIN F", - "r ul", - "ĠJ K", - "ıyor lar", - "sh ield", - "reat h", - "Ġter us", - "ĠL um", - "Ġanticip ation", - "Ġacc ustomed", - "ĠM ina", - "Ġw ield", - "io è", - "mer a", - "Ġcount down", - "Ġcl ing", - "Ġcomm end", - "Ġfakt iskt", - "Ġdef enses", - "Ġcock pit", - "Ġком анд", - "Ġdish was", - "ĠThan os", - "Ġkid neys", - "Ġse he", - "Ġmicro bes", - "Ġc uff", - "ĠвÑĭÑģ ок", - "ĠSp icy", - "çŃī çŃī", - "வ à®°", - "cul us", - "or c", - "ç¾ ħ", - "ix es", - "ĠC redit", - "Ġr aj", - "Ġbring t", - "ĠN iss", - "Ġgr im", - "ĠS OL", - "Ġten im", - "ĠSud an", - "ĠSp art", - "Ġpromot es", - "ĠN ossa", - "ĠÑģоÑģÑĤо Ñıни", - "Ġì° ©", - "Ġunc ont", - "ĠLiber al", - "ĠТ олÑĮко", - "ĠV iele", - "Ġktóre j", - "Ġ* ***", - "M ax", - "ĠЧ ÑĤобÑĭ", - "3 50", - "Ġíĺ¼ ìŀIJ", - "Ġë¶Ħë ĵ¤ìĿ´", - "Ġwar p", - "Ġteng a", - "Ġsympath etic", - "Ġbiz i", - "ĠZ ack", - "ied o", - "Ġëī ´ì", - "p iel", - "ĠÑĤ ол", - "Ġsc aled", - "ĠPET ER", - "ĠCO MM", - "ĠC ame", - "Ġcatast rophe", - "Ġsweat y", - "ig ration", - "Ġstuff ing", - "ĠÏĢολ Ïį", - "ĠDri ver", - "zy st", - "T ech", - "Ġassess ed", - "ĠSur face", - "ır ım", - "s ur", - "ler weile", - "Ġд ог", - "Ġshut ting", - "Ġfr actions", - "ĠÑģ ол", - "every one", - "Ġer n", - "ĠÐĿ ов", - "Ġdefend ers", - "Ġvers ucht", - "ãĥ³ãĥ Ģ", - "Ġpol ity", - "ĠÐŁ он", - "ver ständ", - "Ġbrows ers", - "Ġtransform ative", - "Ġdict ate", - "ĠLE GO", - "Ġning una", - "ê´ ij", - "Ġp izz", - "ĠHar old", - "ĠL opez", - "Ú¾ ÛĮ", - "an ız", - "atch et", - "ÙĬ ت", - "Ġl ernen", - "Ġê·Ģ ìŬ", - "Ġhous ed", - "Ġclean se", - "ĠW AT", - "lar ation", - "Ġby tes", - "Ġtuck ed", - "Ġfault s", - "д о", - "F X", - "Ġìĸ¼ë§ ĪëĤĺ", - "Ġde form", - "Ġcontract ing", - "ĠTIM E", - "ir se", - "Ġne ben", - "Ġc erc", - "ĠArm strong", - "Ġtest er", - "Ġparf ait", - "Ġjealous y", - "Ġtox ins", - "Ġdis bel", - "ÑĥÑĢ Ñĭ", - "imp ression", - "Ġprost ate", - "Ġfire wall", - "Ġclass ics", - "еÑĩ ÑĮ", - "Ġsocial ism", - "Ġgrac ious", - "ĠÑģ нова", - "Ġд нÑı", - "Ġburn er", - "ĠMin or", - "Ġìļ°ë ¦¬ë", - "Ġjed es", - "Ġcontinu um", - "Ġh ots", - "Ġoccur 
rence", - "Ġadminister ed", - "Ġзам еÑĤ", - "Ġhes itation", - "Ġdr ills", - "er ca", - "ĠвÑĤоÑĢ ой", - "Ġstead ily", - "Ġinsan lar", - "Ġi han", - "í ij", - "Ġhel per", - "ĠSen in", - "åģ ľ", - "ов ание", - "ĠER IC", - "b la", - "ĠAcad emic", - "Ġhuman ities", - "bl ack", - "ump y", - "ort ex", - "Ġìł Īë", - "ĠØ¥ ÙĨ", - "Ġdiscl ose", - "ĠEl ijah", - "Ġλ ÎŃ", - "ĠQu er", - "ب ÙĦ", - "ãĤ ¡", - "T ell", - "ar le", - "Ñĸ ÑĢ", - "Ġaug mented", - "Ġë¹Ħ ìĬ·", - "Ġand roid", - "ठ¤", - "ar ma", - "Ġs zer", - "ge ord", - "Ġge ek", - "Ġye ux", - "Ġp ong", - "ĠãģĿ ãģĨ", - "Ġtort ured", - "ĠB ath", - "z ig", - "ason able", - "Ġn ets", - "Ġbar u", - "ĠFl at", - "ĠV ater", - "ĠTer ror", - "ĠA vo", - "Ġceremon ies", - "ro e", - "Ùģ س", - "O ps", - "Ġhy vin", - "Ġap resent", - "ol or", - "ĠигÑĢ Ñĭ", - "ort on", - "Ġê·¸ëŀ ¬", - "Ġlook in", - "ĠT Y", - "ĠM int", - "Ad d", - "Ġm ite", - "ĠSm oke", - "Ġnot a", - "Ġm oss", - "ĠAb end", - "Ġì» ¨", - "Ġexagger ated", - "f ires", - "Ġred ist", - "ff iti", - "Ġopen ness", - "ê°IJ ìĿ´", - "ende u", - "ен ной", - "W atch", - "Ġav atar", - "ĠP ey", - "ur un", - "Ġsen za", - "Ġì§Ģ ìĹŃ", - "ĠNat omiast", - "Ġemer gence", - "ray s", - "Ġcraft ed", - "g ary", - "ãģł ãģij", - "ü ng", - "- \"", - "Ġhack ed", - "Ġstr ay", - "en cie", - "em o", - "Ġcom en", - "ĠK ız", - "ĠJ asmine", - "ĠH indi", - "man as", - "Ġinfin itely", - "em on", - "ìĿ¸ëį° ìļĶ", - "j ak", - "Ġro aring", - "éri que", - "s weise", - "ĠRo lex", - "åł± å°İ", - "ĠStu art", - "bn b", - "Ġdiagn ose", - "Ġcoher ent", - "ĠM J", - "æºĸ åĤĻ", - "Ġp ike", - "l av", - "Ġorchest ral", - "а ÑģÑĤи", - "Ġterm inar", - "Ġgather ings", - "Ġcompl iant", - "Ġupgrad ing", - "Ġregul ator", - "Ġlan ç", - "éĢ £", - "Ġmerch ants", - "ta wa", - "Ġmonit ored", - "Ġrend re", - "ä¸ ¤", - "Ġunter wegs", - "ang uard", - "g ard", - "ĠBel ow", - "du ino", - "ĠЦ е", - "Ġimped ance", - "ìľ ¡", - "ä» ½", - "Ġakt uell", - "ĠV atic", - "åŃ ©", - "Ġste wards", - "Ġbright est", - "Ġk enn", - "Ġk au", - "ĠMat rix", - "ĠB ark", - "ĠðŁ ij", - "Ġt aper", - "Ġcas ino", - "ר ×Ķ", - "ys ical", - "Ġbuild ers", - "ĠczÅĤ owie", - "ĠNep al", - "Ġ! \"", - "Ġterm e", - "Ġin nych", - "Ġmath s", - "Ġdraft ed", - "ĠB alk", - "Ġhesit ant", - "Ġvolt ar", - "Ġrev ive", - "ĠÑĦилÑĮ ма", - "Ġassass in", - "ĠS olutions", - "Ġdu el", - "Ġbear ings", - "à¸Ħ ะ", - "Ġrook ie", - "ik at", - "Ġbisc uits", - "Ġc ords", - "Ñĥв аÑĤи", - "AR IN", - "Ġprogress ing", - "ĠG ir", - "Ġpenet rate", - "ĠSt orage", - "e ight", - "ĠÑĤ ÑĢÑĥ", - "Ġdon ÃŃt", - "Ġsiz in", - "Ġout dated", - "ĠнаÑĪ и", - "Ġaff ir", - "Ġspo ons", - "Ġon i", - "Ġfl ank", - "ĠG ol", - "h ã", - "Ġp éri", - "Ġhonor able", - "ĠBreat he", - "sc enes", - "Ġob viamente", - "ик Ñģ", - "Ġש ×ŀ×", - "Ġsmooth ie", - "ŀ Īë", - "Ġd ime", - "ĠíĸĪ ìĸ´ìļĶ", - "Ġapp el", - "ĠCath olics", - "Ġsing les", - "Ġlat en", - "Ġç ünkü", - "ĠV ader", - "æı Ľ", - "Ġvard ı", - "ĠIst anbul", - "gr é", - "ĠEl sa", - "ë l", - "Ġinve ce", - "Ġcr ane", - "Ġo be", - "ĠSh ark", - "Ġsm ack", - "Ġrest oring", - ". 
\\", - "Ġë¹ łë", - "Ġf aded", - "um bers", - "S inging", - "Ġdep ressing", - "th est", - "ĠW ahr", - "Ġmult itude", - "ÑĢавÑģÑĤв ÑĥйÑĤе", - "rij k", - "ek a", - "Ġcomplet es", - "ĠWell s", - "Ġro y", - "ĠPr ay", - "ĠKal au", - "iz in", - "iaÅĤ em", - "Ġlo com", - "ĠNash ville", - "ĠPent agon", - "ë ¯¸", - "ĠNE W", - "Äħ Äĩ", - "ÃŃ ss", - "Ġmarry ing", - "Ġfe ud", - "íĻ ķ", - "æĢ ¥", - ") !", - "ĠOper ations", - "Ñĥ ÑĶ", - "Ġmo je", - "Ġinstruct ed", - "ĠëĪĦ 구", - "Ġ×Ķ× Ĵ", - "ĠпомоÑī ÑĮÑİ", - "Ġsab ia", - "ìķĺ ìĸ´ìļĶ", - "pl ane", - "p ri", - "Ġпол ноÑģÑĤÑĮÑİ", - "ĠK itty", - "Ġpróp rio", - "ed ere", - "Ġinteres ante", - "Ġд е", - "Ġcond ensed", - "Ġav ent", - "T OR", - "Ġgre asy", - "AR K", - "ort a", - "A J", - "Ġdis reg", - "Ġcorrect ions", - "Ġst ero", - "Ġinfluen za", - "Ġdess es", - "Ġball ots", - "Ġme get", - "Ġma fia", - "Ġb öl", - "n ost", - "ĠÑģÑĤ аÑĤÑĮ", - "Ġrespond er", - "Ġhint en", - "g rav", - "à¸Ń ะ", - "yn chron", - "Ġvi ens", - "Ġsam o", - "Ġd t", - "pan nt", - "ĠÅĽwi at", - "Ġзап иÑģ", - "Ġmer ged", - "Ġke p", - "Ġmis leading", - "Ġdig amos", - "Ġam mon", - "è¾ Ľ", - "ch et", - "Ġê°Ģ ìł¸", - "Ġun i", - "ĠëIJĺ ëĬĶëį°", - "Ġнап ÑĢав", - "ĠкоÑĤоÑĢ ого", - "Ġanim ate", - "×ķ× IJ×", - "еÑĢ в", - "Ġmin ced", - "Ġka um", - "ãģĤ ãģģ", - "ÏĢ ε", - "л ег", - "exist ing", - "Ġplata form", - "ĠK RIS", - "ìĽ ł", - "ĠFamil ien", - "ĠLib ya", - "Ġbiod iversity", - "Ġidi ots", - "ird i", - "Ġszy b", - "ĠRoll ing", - "ü cht", - "ĠÑĥд ив", - "Ñģ Ñĥд", - "Ġreal izar", - "Ġcan ned", - "ĠÑĢ ан", - "Ġmet abolic", - "ĠBe ef", - "Ġkil ka", - "лÑİ Ñģ", - "Ġreg istry", - "моÑĤÑĢ иÑĤе", - "Ġviel ä", - "Ġod c", - "Ġcondem ned", - "æ© ĭ", - "f al", - "ĠD il", - "wo ÅĽci", - "A w", - "Ġstatist ically", - "Ġso gen", - "ĠB ETH", - "Ġsh aving", - "å¹ ¸", - "oc al", - "ĠFun ny", - "Ġpeace fully", - "Ġaddict ive", - "ĠIns ert", - "la uf", - "Ġexperien cia", - "é¦ĸ åħĪ", - "иÑĤ елÑı", - "ÃŃ gen", - "ág ina", - "Ġabdom en", - "íķľ ëĭ¤", - "ic us", - "im ana", - "ì į¨", - "arch ing", - "Ġkonk ret", - "ìķ ĺë", - "ек а", - "ou fl", - "ive l", - "Ġn ude", - "èt res", - "Ġm onsieur", - "Ġcl ash", - "Ġtherap ists", - "Ġcub ed", - "Ġretrou ver", - "Ġwave form", - "Ġpot em", - "ĠForm er", - "is ión", - "åº ľ", - "Ġ×IJ× Ŀ", - "und os", - "ĠMein ung", - "ص ÙĦ", - "ĠJ ude", - "Ġn Ã¥r", - "ĠLeon ardo", - "ĠCr isto", - "ĠG OT", - "ÑģÑĤÑĢÑĥ к", - "L AN", - "Ġg Ã¥ng", - "Ġdé b", - "ĠFrankf urt", - "Ġcra ppy", - "Ġli l", - "ann ée", - "ĠмеÑģÑĤ е", - "RE T", - "ĠN er", - "ĠCO STA", - "Ġjed em", - "Ġcurt ains", - "Ġiter ations", - "Ġun av", - "Ġpla que", - "or um", - "ĠÎ ¶", - "Ġnúmer os", - "Ġdes ap", - "² ½", - "Ġcomp iled", - "Ġref le", - "Ġrank ings", - "Ġrep aired", - "ĠÐĿап ÑĢ", - "Ġdownload s", - "Ġarm our", - "Ġ×Ļ ×ķתר", - "Ġlonge vity", - "ĠTON ER", - "ĠкомменÑĤ аÑĢ", - "Ġcz ego", - "Ġnot ify", - "Ġairport s", - "Ġend uring", - "let te", - "Ġapp arat", - "Ġhab il", - "á»ĩ c", - "n ad", - "IC O", - "ĠBra h", - "Ġseg ún", - "Ġgovern ors", - "k aha", - "ĠSchl uss", - "Ġodpow ied", - "ir ting", - "Ġrem pl", - "ĠAb original", - "ident ally", - "Ġenhan cing", - "lic ting", - "ĠHawai ian", - "Ġstri ving", - "ĠN iet", - "Ġzn aczy", - "Ġobed ience", - "ĠnÃ¥ got", - "Ġexp ired", - "Ġ19 18", - "pres ented", - "Ġpr owad", - "ĠTer r", - "ĠPrinc eton", - "Ġmor gen", - "Ġattract ing", - "ĠS igma", - "ign er", - "ĠRe chts", - "ĠP eki", - "Ġmet hy", - "Ġha mm", - "Ġdire ito", - "Ġdeleg ation", - "ив аÑİÑĤ", - "Ġg in", - "You ng", - "Ġdepend encies", - "ĠBrad ley", - "bud s", - "Ġf is", - "Ġpyt anie", - "Ġinterconnect ed", - "Ġemba 
ixo", - "ĠS as", - "Ġr uh", - "ĠS icht", - "S ur", - "Ġsuper b", - "ĠSabb ath", - "ĠD anger", - "k ol", - "Ġh ou", - "s upp", - "ĠN acional", - "Ġsuccess ion", - "Ġv á", - "ĠMaÃŁ nahmen", - "ĠJess ie", - "ĠId aho", - "fore st", - "ħ ĺ", - "Ġ×ŀ× ĵ", - "ĠØ£ ÙĬ", - "Ġsweet heart", - "Ġneat ly", - "ĠEv angel", - "ê³ ¡", - "ĠSu ite", - "úblic a", - "ĠÑĥ ли", - "ĠAnn ouncer", - "l igh", - "Ġsens ations", - "Ġshel ters", - "Ġh art", - "Ġsqueez ing", - "ĠR ivers", - "ĠCook ing", - "ì± ħ", - "person al", - "Ġman os", - "ÑijÑĤ ÑģÑı", - "w ij", - "Ġgo gg", - "ĠMill i", - "ĠF P", - "ün st", - "ĠL S", - "Ġspray ing", - "Ġf aux", - "Ġaut ograph", - "olog ic", - "Ġtor ment", - "Ġencry pted", - "á» ħ", - "Ġest re", - "ç¹ ¼", - "à ±", - "Ġst umbled", - "Ġa ider", - "Ġsab en", - "x ter", - "ĠC ities", - "ĠTür k", - "ëĭ ¥", - "ch ine", - "Ġto pping", - "Ġpoison ed", - "ĠRoman ia", - "×ĵ ×Ļ", - "Ģë ¡ľ", - "ĠпоÑĢ Ñıд", - "Ġchir ping", - "ĠìĻ Ħë", - "×ij× ¢", - "Ġcu anto", - "Ġdon ating", - "ĠReg ent", - "ĠBer uf", - "Ġdistract ing", - "Ġstam ina", - "ĠDar ren", - "Ġì¶ ķ", - "l ists", - "d al", - "ch uss", - "Ġeconom ist", - "ãģĪ ãĥ¼", - "org t", - "Ġist iyorum", - "è¿ Ľ", - "ĠSur prise", - "ĠHa o", - "Ġìµľ ê³ł", - "ĠG W", - "ĠIn ner", - "Ġqu ieren", - "Ġmind ed", - "Ġsupercom puter", - "Ġdiagram s", - "íĬ ľë", - "ê²ł ìĸ´", - "ĠобÑĬ ÑıÑģ", - "Ġestab an", - "Ġdestro ys", - "ĠBre aking", - "Ġkar Ä±ÅŁ", - "Ġrebuild ing", - "ľë ĮĢ", - "ли во", - "ĠSau ce", - "ĠF usion", - "×ķ× ŀ×", - "ĠQu inn", - "Ġga uche", - "ĠÙĪ Ø£", - "Ġ È", - "ç ĵľ", - "Ġtechn o", - "Ġdisp atch", - "ĠaÅŁ k", - "Ġein zel", - "ĠG mail", - "ç ŀ", - "Ġê°ľ ìĿ¸", - "ĠÑģем ÑĮ", - "Ġjour neys", - "Ġi ht", - "Ġfib re", - "Ġdram as", - "ouch ed", - "Ġren ame", - "Ġоп еÑĢ", - "Ġpo o", - "ĠD ru", - "ĠиÑĤ ог", - "Ġz ast", - "Ġco z", - "Ġz ucch", - "Ġobt aining", - "Ġcomm ute", - "Ġsub mer", - "ĠV ish", - "ĠR abb", - "og g", - "Ġh ut", - "íĸĪ ìĸ´", - "æ¯Ķ å¦Ĥ", - "ere mi", - "Ġμ α", - "Ġdisk ut", - "Ġб Ñĥк", - "Ġimp aired", - "d epend", - "ĠÙĪ ا", - "ĠÑĢ Ñĥк", - "Ġб аÑĢ", - "Ġoxid ation", - "Ġsitu ação", - "ÉĻ n", - "u ção", - "Ġsag te", - "ĠS ER", - "ĠC ake", - "Ġtur meric", - "ĠK ak", - "b ung", - "ĠK á¹Ľá¹£á¹ĩa", - "Ġpoison ing", - "Ġsl ipping", - "ĠS ays", - "å°± åı¯ä»¥", - "ò ng", - "çŁ ³", - " «", - "ĠClaud ia", - "ĠChar acter", - "ни ÑĨ", - "co at", - "Ġprogress ed", - "ĠFer gus", - "Ġìĺ¤ ëĬ", - "Ġo at", - "ord able", - "ĠLe y", - "ĠHera us", - "Ġresult ados", - "ĠKay la", - "Ġr iff", - "Ġcheg ou", - "Ġx i", - "Ġsp acious", - "Ġrecogn ised", - "Ġe ch", - "ĠT ie", - "Ġlaunch er", - "J im", - "Ġsupp ression", - "ĠImp ossible", - "Ġguit ars", - "ĠFour ier", - "иÑĩеÑģ кий", - "ĠTh erap", - "ĠK af", - "cent ered", - "ĠÑģо оÑĤвеÑĤ", - "Ġk lim", - "Ġcarbohyd rates", - "ign ant", - "ĠAst ron", - "Ġem ple", - "Ġdr astic", - "ĠмиÑĢ е", - "в ин", - "u w", - "Ġpret tier", - "Ġdon uts", - "ĠAth ena", - "Ġdiss ert", - "Ġpl ante", - "Ġur anium", - "ìĿ Įë", - "ar é", - "Ġrze cz", - "Ġdisplay ing", - "æĪ ²", - "Ġsar c", - "r ão", - "Ġtamp oco", - "Ġphilosoph ers", - "ĠRe cht", - "æĵ ļ", - "Ġcoment arios", - "y se", - "Ġìľ ¤", - "Ġm ise", - "ĠG in", - "Ġн ом", - "ĠFR OM", - "l iner", - "at if", - "Ġspo ÅĤec", - "x a", - "ĠÑĤ ÑĢÑĥд", - "Ġw ag", - "기 ìĹIJ", - "ĠM G", - "Ġoff spring", - "ĠUnder standing", - "åıª æĺ¯", - "OR A", - "Ġwh irring", - "Ġsur rend", - "Ġpok er", - "Ġmon uments", - "ĠâĻ ©", - "Ġorgan ised", - "ĠSo zial", - "ĠF actory", - "Ñħ а", - "Ġrese mble", - "з д", - "Ġexplos ions", - "Ġpay roll", - "Ġom n", - "ĠJ orge", - "ι Ïĥ", - "Ġfract ure", - "Ġpersec 
ution", - "Ġdem ais", - "E CH", - ", )", - "Ġcri ar", - "ĠJ OSH", - "Ġdem ographics", - "Ġ16 00", - "Ġcur rencies", - "ĠT ips", - "Ġ éĢĻåĢĭ", - "ĠRe fer", - "ĠDan cing", - "Ġincons istent", - "Ġde h", - "Ġimm ens", - "Ġme ist", - "Ġimpat ient", - "Ġbehav es", - "æĿ ¾", - "ĠëĤ´ì ļ©", - "Ġback story", - "Ġagree ing", - "ĠÅ ģ", - "ih in", - "Ġtemper atura", - "ĠBack ground", - "Ġnut zen", - "Ġëħ ¹", - "ĠM änner", - "Ġcollabor ations", - "ĠK os", - "éģİ åİ»", - "Ġnight mares", - "ë ĵ±", - "ĠQueens land", - "Ġassoci ates", - "ĠK ok", - "Ġfact orial", - "ĠHy ung", - "Ġê·¸ ëĭ¤ìĿĮ", - "Ġfil ho", - "Ġel ét", - "Ġíĸī ë³µ", - "° ±", - "Ġgef unden", - "Ġsemic ondu", - "Ġcounsel ors", - "ĠU pper", - "ĠA ub", - "ick ers", - "V er", - "Ġnorth west", - "ĠMainten ant", - "ĠL akes", - "аÑı в", - "int é", - "ì° ½", - "Ġг аз", - "Ġgi orn", - "Ġdigit ally", - "ĠCirc uit", - "ì¼ Ģ", - "ãĤĬ ãģ¾ãģĹãģŁ", - "Ġcheer ful", - "ĠPet erson", - "ĠDan ish", - "ativ os", - "Ġli ken", - "Ġhar bor", - "али ÑģÑĤ", - "x e", - "Ġcur ls", - "ĠR hod", - "E nd", - "ĠE T", - "Ġacqu aint", - "ĠKel vin", - "Ġtr if", - "ĠA way", - "ìŀIJ ëĬĶ", - "v s", - "Ġp ágina", - "Ġin let", - "ĠSant os", - "Ġìļ° ìĻĢ", - "Ġyap ıyorsun", - "th eme", - "Ġsou ff", - "Ġinject ed", - "Ġpó źniej", - "iver so", - "amp ed", - "Ġda her", - "Ġd agger", - "ĠлÑİб им", - "Ġt ummy", - "Ġenlight ened", - "c ents", - "ĠD ah", - "Ġcu est", - "ä¾Ĩ 說", - "IL Y", - "Ġ×ij ר", - "Ġbang ing", - "ĠEm il", - "ĠC ler", - "ĠB order", - "иж Ñĥ", - "Ġpresent ers", - "ĠST UD", - "co ins", - "ĠíĻ į", - "Ġper ks", - "Ġpar ap", - "Ġcertain es", - "ĠL ore", - "ö st", - "ĠMAR TIN", - "Ġb ios", - "Ġwhere by", - "ver ts", - "ĠMir anda", - "Ġst ip", - "æ¾ ¤", - "and ez", - "׼ ׾", - "uj in", - "Ġê ¾", - "Ġaller gies", - "pl ate", - "Ġyap ıl", - "Ġundert ake", - "ĠëĤĺ ê°Ģ", - "P art", - "Ġkız ım", - "h guru", - "ãģĤ ãģ¨", - "ĠJohn s", - "Ġeyel ashes", - "Ġdra ined", - "Ġst Ã¥r", - "ãģĤãĤĬ ãģ¾ãģĻ", - "ĠJ ade", - "Ġcal end", - "fil m", - "Ġmes a", - "Ġlud zie", - "Ġattract s", - "Ġju ices", - "Ġк ил", - "Ġnieu we", - "Ġmen cion", - "Ġign ition", - "Ġbl adder", - "anda ag", - "ĠExt ension", - "íĤ ¨", - "fe ed", - "ĠÙĪ Ùĩ", - "Ġsp un", - "Ġt ät", - "оÑĢ оÑĤ", - "ty ard", - "ron ics", - "ĠH uge", - "Ñĥж д", - "st ring", - "Ġun just", - "Ġpra wn", - "Ġfrost ing", - "Ġdisappear ance", - "ios a", - "Ġcard i", - "ĠPri est", - "Ġcient ÃŃfic", - "åĵª 裡", - "ĠÐĴ аÑģ", - "Ġë¶Ģ íĥģ", - "Ġth ieves", - "Ġphys ique", - "ĠE ugene", - "Ġбли з", - "Ġmon opoly", - "Ġbi ography", - "Ġho ÅŁ", - "Ġt ö", - "m ac", - "Ġshock s", - "ìĦ ¸ë", - "h it", - "Ġsn ug", - "Ġinc l", - "Ġded ic", - "Ġult ras", - "Ġизв еÑģÑĤ", - "Ġutil ization", - "ĠÑģовеÑĢÑĪ енно", - "Ġserv i", - "st ag", - "1 80", - "Ġse wer", - "ĠCh oice", - "Ġdis charged", - "ĠJ D", - "ол еÑĤ", - "ĠкваÑĢ ÑĤи", - "Ġteles cop", - "ĠJe ÅĽli", - "ĠN ana", - "c ale", - "ĠÑĤ он", - "mm m", - "äºĨ åIJ§", - "Ġge habt", - "ëĤ ł", - "æĬ ķ", - "à¸Ļ à¸Ļ", - "Ġet her", - "Ġz en", - "Ġresearch ed", - "ĠCzy li", - "å®Į åħ¨", - "work ers", - "Ġê²½ ì°°", - "Ġsher iff", - "all o", - "Ġtip os", - "Ġprosec ution", - "Ġfrog s", - "Ġf alt", - "j d", - "ĠíĮ Ķ", - "Ġfilter ed", - "ĠO ft", - "Ġì į", - "Ġdis fr", - "ĠMust ang", - "Ġwo ah", - "ĠRE ALLY", - "Ġмог ли", - "Ġentr ada", - "Ġиг ÑĢа", - "Ġmix es", - "ĠавÑĤом об", - "Ð Ļ", - "Ġsh in", - "Ġparan ormal", - "Ġsome place", - "Ġdish on", - "eta an", - "Ġfu erte", - "Ù ¹", - "Ġdo om", - "ìĪ ľ", - "Ġexist ential", - "Ġbu ld", - "ĠSD K", - "ĠпÑĢав да", - "Ġturn over", - "ĠìĹ¬ê¸° ìĹIJ", - "Ġठ¹", - "Ġmodel ed", - "Ġbug ün", - 
"Ġexperiment ation", - "Ġmorning s", - "Ġmed o", - "Ste vie", - "Ġplay able", - "Ġairl ines", - "g ments", - "Ġê¸°ë ¶Ħ", - "ĠT omb", - "ĠMV P", - "AUDI ENCE", - "Ġcheck out", - "Ġpas st", - "Ġbe ispiel", - "ĠLink s", - "he avy", - "Ġquestion able", - "Ġìĵ °ë", - "Ġs ill", - "Ġmanip ulated", - "ĠL oren", - "Ġìľ ¼", - "Ġver ge", - "á k", - "I ES", - "Ġsab ot", - "ĠCustom er", - "ale ży", - "Ġnom inee", - "ĠG ad", - "Ġnouve lles", - "ĠS PE", - "ist ling", - "Ġo val", - "обÑĢ аж", - "if ty", - "éĩ İ", - "Ġbez el", - "y et", - "Ġfre ight", - "ĠHan ım", - "r ÃŃa", - "Ġz oning", - "Ġind em", - "ĠB ü", - "Ġfemin ism", - "Ġvo ix", - "Ġof icial", - "Ġdi yorum", - "» IJ", - "Ġar ose", - "Ġpar ar", - "ìĿ¸ ì§Ģ", - "ĠMart ine", - "ĠL ect", - "Ġrest er", - "Ġdrown ing", - "u ya", - "c ida", - "ĠAri el", - "Ġ0 2", - "Ġ×Ķ ×Ķ", - "ç´ ł", - "ĠW ert", - "Т Ñĭ", - "Ġwid ow", - "Ġparch ment", - "Ġcott age", - "ĠX L", - "ĠSl ack", - "ĠN ES", - "Ġro be", - "Ġg imm", - "Ġcam inho", - "ĠHar per", - "Ġcit rus", - "Ġfirefight ers", - "Ġdop amine", - "el ets", - "Ġdemocr at", - "ìł ľë¡ľ", - "Ġplay back", - "o j", - "ĠпÑĢ ок", - "ĠSull ivan", - "se mble", - "ĠW orth", - "ĠMust afa", - "า ร", - "Ġmet s", - "éĸ Ģ", - "л оÑģÑĮ", - "Ġinert ia", - "Ġuniform s", - "è¶ ³", - "é rio", - "×ķר ×Ķ", - "é nt", - "Ġà® Ĵ", - "ĠÑģам ÑĭÑħ", - "Ġvou lais", - "ĠZ immer", - "ê² łë", - "Ġн оÑģ", - "en cias", - "Ġrel ación", - "Ġê± ¸ë", - "Ġfact ion", - "Ġg osp", - "пол ож", - "n ap", - "h ak", - "Ġproceed ings", - "ĠìĨ Ķ", - "ìķĦ ëĭĪ", - "ĠìŀIJ 기", - "Ġwer d", - "Ġso f", - "Ġsch lim", - "Ġfl avored", - "Ġquad ratic", - "ĠBo ot", - "Ġpublic ity", - "ĠCar o", - "Ġ ?\"", - "ни ÑĨа", - "man ia", - "ĠS UR", - "ĠB UR", - "l ance", - "ét ica", - "Ġzob aczy", - "Ġtri o", - "s ama", - "Ġta ÅŁ", - "Ġas ymm", - "ress er", - "Ġت ع", - "Ġп еÑģ", - "Ġbeginning s", - "lad ım", - "ĠбÑĭ ÑģÑĤÑĢ", - "Ġmo o", - "ĠGene va", - "Ġ åľ¨", - "er us", - "bor ah", - "Ġref using", - "b ull", - "ĠWait ing", - "ĠInd ividual", - "Ġan onym", - "im ens", - "Ġmed idas", - "Ġfragr ant", - "Ġdirect ement", - "ĠìķĦ ë§Ī", - "ur ia", - "Ġsp herical", - "Ġab ge", - "ĠVictor ian", - "Ġspect acle", - "ĠRodrig uez", - "Ġoc up", - "ĠN är", - "mark s", - "ng ulo", - "ĠLu ci", - "Ġshout ed", - "Ġregul ators", - "ÄŁ ini", - "Ġdis ent", - "ĠÑĢÑĭ н", - "ëĤ ¨", - "ĠìĤ ´ë", - "Ġprobl èmes", - "ĠF inger", - "asse mble", - "Ġpe ar", - "Ġdro ite", - "ĠEvery where", - "t am", - "оÑĤ ив", - "в ой", - "ordin ate", - "ĠL ak", - "Ġm Ỽi", - "ĠTele vision", - "Ġexpon entially", - "av as", - "Ġble v", - "ĠM T", - "ä¿ º", - "Con nell", - "ĠêµŃ 민", - "ĠÑģво им", - "Ġach a", - "ĠD ynasty", - "J in", - "Ġto re", - "Ġfl or", - "Ġмног ие", - "æ²Ĵ äºĭ", - "ow an", - "b ah", - "Ġì£ Ħ", - "ĠC ela", - "Ġìµľ ê·¼", - "Ġpermett re", - "Ġab ras", - "Ġverste hen", - "Ġesc ort", - "ĠThe m", - "är ke", - "por ter", - "Ġkah kaha", - "Ġhe ct", - "Ġda u", - "w ah", - "ol ve", - "ĠAg es", - "s chaft", - "ĠSt ell", - "ne lle", - "ĠEn suite", - "ĠÐĴÑģ ем", - "Ġcr éd", - "ĠP P", - "l ords", - "gr unting", - "Ġcontract ion", - "G ot", - "Ġacqu iring", - "Ġso pr", - "Ġpoison ous", - "R NA", - "Ġan ar", - "ĠH of", - "' )", - "Ġremark ably", - "Ġintern acional", - "ü cke", - "in qu", - "Ġdu y", - "Ġbeast s", - "ĠL AN", - "Ġpreced ent", - "ĠRP M", - "åij ¨", - "Ġsel on", - "Ġmort e", - "Ġcomeç ou", - "Ñı ла", - "Ġinterpre ting", - "ĠBur ke", - "ÑĤ ÑĢа", - "ĠìĿ´ë Ł¬", - "Ġpess im", - "ĠN ok", - "íĮ Ŀ", - "F emale", - "Ġìĭ ¤í", - "Ļ Ģ", - "Ġstim ulation", - "Ġsl ick", - "Ġê°Ģ ëĬĶ", - "Ġк аз", - "ĠH BO", - "Ġpap ier", - "Ġkön 
nten", - "Ñĥб ли", - "ĠConst ant", - "SPEAK ING", - "Ġktó rÄħ", - "Ġcos metics", - "ĠT rend", - "Ġrob bery", - "Ġt itt", - "Ġgj ort", - "Ġdiet ary", - "ł Į", - "ĠKir by", - "ĠпÑĢимеÑĢ но", - "Ġqual ification", - "Ġìķ ī", - "Ġcabin ets", - "Ġhtt p", - "ĠEric a", - "ç¾ ©", - "Ġdisadvant ages", - "Ġch attering", - "y z", - "fe it", - "Ġgu ild", - "ĠE TF", - "ĠDrag ons", - "ĠH ERE", - "vent h", - "ÙĦ اÙħ", - "Ġmarch é", - "D am", - "Ġphot on", - "Ġest able", - "M ag", - "Ġol har", - "Ġcou pling", - "ĠHil fe", - "ĠW izard", - "Ġм ало", - "hel p", - "ĠlÃŃ nea", - "Ġì «", - "Ġstand alone", - "Ġmor ale", - "Ġzwe ite", - "ãĤĪãĤį ãģĹãģı", - "ähr t", - "Ġd otted", - "Ġdri pping", - "ĠFl ag", - "éĿ Ĵ", - "ro cket", - "rate gy", - "ir im", - "Ġíķĺë ©´ìĦľ", - "Ġsogen an", - "ĠUn o", - "ĠSch utz", - "Ġest ilo", - "ĠS ubs", - "ĠDais y", - "ÐĿ еÑĤ", - "' ...", - "Ġplat inum", - "Ġb irl", - "ĠSo vi", - "Ġviol ate", - "Ñĥ еÑĤÑģÑı", - "r ill", - "Ġtra z", - "Ġsn ip", - "Ġcum pl", - "à¸Ń à¸ģ", - "Ġc uk", - "éħ Ĵ", - "ĠParl ament", - "Ġhyper t", - "Ġpul p", - "Ġtong ues", - "at to", - "Ġbus ca", - "ih n", - "ER O", - "ĠÙĬ ع", - "Ġvari as", - "ĠMar ian", - "Ġbound ed", - "Ġpitch ing", - "Ġdefic iency", - "ĠBless ed", - "ĠEx erc", - "uch s", - "ĠnhÆ° ng", - "æľ¬ å½ĵ", - "Ġrap ed", - "h ales", - "Ġmal a", - "p ic", - "Ġ40 1", - "ÅĽ niej", - "ar ina", - "ëĵ¤ ìĿĦ", - "ott i", - "Ġдол го", - "Ġtrack er", - "ĠShel by", - "Ġvan ished", - "Ġbak ery", - "Kap ı", - "J esus", - "ĠK R", - "J O", - "ħ ¸", - "Ġdisc s", - "ìĦ ¯", - "ì§Ģ ë", - "×Ļ× ¦", - "em ary", - "K endra", - "Ġy ük", - "ück t", - "Ġv az", - "Ġk up", - "akt u", - "ĠÑģп аÑģибо", - "Ġa ik", - "Ġnurs ery", - "Ġendanger ed", - "êm ement", - "emat ics", - "Ġrespond ers", - "ĠRepresent atives", - "Ġsculpt ures", - "ig keiten", - "Ġde pl", - "Ġinterpret ations", - "Ġdead lines", - "Ġ194 2", - "à Ĺ", - "Ġsug ars", - "em u", - "l ively", - "Ġrecre ational", - "Ġdist ort", - "Ġunders core", - "Ġun quote", - "Ġsaf est", - "Ġsw ollen", - "Ġanalys es", - "Ġcommen cé", - "å¦ ¹", - "and in", - "ĠÐ¥ оÑĢоÑĪо", - "Ġdi arr", - "ãģ¾ ãģģ", - "zi est", - "Ġtooth brush", - "éł» éģĵ", - "u ations", - "Ġc ade", - "Ġbackl ash", - "h ind", - "Ġris que", - "z ess", - "ĠìĿ´ìķ¼ 기", - "Ġesper ar", - "Ġtransl ations", - "ion ed", - "gro ans", - "Ġп ÑĥÑĤ", - "Ġgen etically", - "éĢ ł", - "Ġhapp iest", - "Ġwer k", - "ato on", - "Ġmus i", - "Ġfun ção", - "Ġìŀħ ëĭĪëĭ¤", - "ĠÑĢ ай", - "Ġbe vor", - "BL ANK", - "Ġrepent ance", - "P ut", - "Ġpotrze b", - "Ġsal a", - "Ġcamp a", - "W ER", - "Ġdec ÃŃa", - "Ġsécur ité", - "ĠAppreci ate", - "Ñĩ и", - "ĠR andom", - "ë³ Ħ", - "k ah", - "Ġmö j", - "Ġsä ger", - "Ġ×Ļ ׼×ķ׾", - "Ġ19 0", - "xt ures", - "E u", - "Ġg ä", - "Ġ×ij× ª", - "ĠC roat", - "ap o", - "P LE", - "Ġpersist ence", - "åĬ ©", - "Ġbl ends", - "Ġtre ffen", - "ĠSanti ago", - "yd ia", - "al do", - "ĠTensor Flow", - "ĠD ual", - "ãĥ ľ", - "Ġch iff", - "ìĹ ´", - "Ġcontract ed", - "Ġseg reg", - "ĠFair y", - "Ġwis ely", - "Ġvulner abilities", - "Ġhand held", - "Ġgad gets", - "Ġbo ÅŁ", - "ĠPop ular", - "Ġcurv ature", - "ë ¬¸", - "ĠMAR Y", - "ìĿ´ì Ĭ", - "Ġform ulation", - "Ġcel ery", - "Ġblur ry", - "ĠT S", - "ale z", - "Ġw s", - "Ġprogram m", - "ĠSt ack", - "ĠJ IM", - "ов али", - "ı ll", - "Ġp ère", - "ĠKan ye", - "ĠDel aware", - "Ġãģ ł", - "Ġda unting", - "Ġб еÑģ", - "ĠSt upid", - "b ig", - "ffic ial", - "Ġprecip itation", - "Ġpl ung", - "ụ c", - "bur se", - "Ġdar le", - "Ġcri pp", - "Ġpione er", - "Ġdis put", - "Ġse an", - "ãģĵ ãĤĵãģª", - "Ġresist or", - "Ġalle in", - "ipp les", - "are l", - "Ġend 
ors", - "z ust", - "ĠÑĢеб ÑıÑĤа", - "ed ed", - "Ġì¹´ë ©Ķë", - "Ġlle va", - "Ġken nt", - "Ġб ал", - "ĠDoc ument", - "ĠKn ights", - "Ġbuck le", - "Ġìī ¬", - "Ġal k", - "ĠEvery day", - "atter s", - "Ġtoil ets", - "Ġj ugar", - "ĠìŀĪ ì§Ģ", - "Ġgen auso", - "ĠLandes regierung", - "ãģ£ãģ ±", - "ij e", - "Ġtrail ers", - "ĠT igers", - "Ġg itti", - "Ġforg iving", - "Ġconcur rent", - "ĠV u", - "ĠíĬ¹ íŀĪ", - "ĠBR OWN", - "ound ed", - "\" ;", - "Ġtre mb", - "Ġt iet", - "ĠÑĢеж им", - "Ġnuts hell", - "ел иÑĩ", - "Ġlos ers", - "ric ting", - "Ġrede em", - "def ined", - "N ice", - "Ġbroad band", - "K O", - "Ġte asing", - "Ġpart isan", - "ı ma", - "Ġìŀ¬ë ¯¸", - "ĠJour ney", - "Ġslop es", - "un ing", - "gr unts", - "Ġt äll", - "Ġuncover ed", - "Ġmy ÅĽlÄĻ", - "ĠEst her", - "äº İ", - "ĠHealth y", - "Ġë° ij", - "r ée", - "Ġpolar ization", - "Ġfl av", - "Ġcambi ar", - "Ġy r", - "ĠR anch", - "Ġspl its", - "Ġtrou vé", - "åľĭ 家", - "Ġrecord er", - "Ġdé part", - "ÙĪ ب", - "ĠK ry", - "Ġinteress ant", - "Ġeder im", - "ÅĽ wiad", - "il ateral", - "w right", - "Ġpour ra", - "ê ter", - "Ġcam el", - "á ŀ", - "Ġrapid ement", - "Ġme j", - "Ġstiff ness", - "AD AS", - "Ġdiff ers", - "Ġal ot", - "ĠS ig", - "ÑıÑĤ елÑĮ", - "Ġabstract ion", - "åľ ĺ", - "Ġke iner", - "gr upp", - "ĠSher lock", - "íĺ Ķ", - "Ġc ite", - "Ġover flow", - "Ġt ại", - "ú car", - "b ula", - "Ġconjun to", - "ĠC I", - "Ġmoder ator", - "Ġindirect ly", - "Ġalle ine", - "â Ĥ", - "ÑĪ иб", - "Ġб аб", - "Ġdan ach", - "Ġ19 39", - "Ġpr omet", - "Ġdest inations", - "ĠIll ust", - "ικ ÏĮ", - "Ġsab es", - "Ġhe h", - "ĠGesetz ent", - "ĠM iz", - "ен ко", - "ĠM ys", - "Ð ¬", - "ĠJuda ism", - "Ġmust ache", - "Ġst immt", - "ĠG aza", - "Ġvol te", - "Ġnu o", - "Ġm ón", - "ĠCom put", - "ู à¹Ī", - "ĠR adi", - "Ġexception ally", - "Ġassum es", - "éĸĭ å¿ĥ", - "ãģĪ ãģ°", - "in form", - "Ġshr ine", - "æĵ Ĭ", - "Ġimplic ation", - "ĠF itz", - "æ²Ĵ éĹľä¿Ĥ", - "! 
.", - "Ġl t", - "Ġall oy", - "Ġeth ic", - "Ġmonaster y", - "ìĭľ ì£ł", - "ica ção", - "Ġcoordin ating", - "ĠM oto", - "Ġover look", - "Ġcho is", - "Ġantibiot ic", - "ĠMin ne", - "ĠB J", - "ĠA pa", - "or ian", - "Ġsp illed", - "J am", - "Ġhus bands", - "Ġcre ations", - "Ġa ñ", - "üs sel", - "ĠìĿ´ì ļ©", - "Ġanaly se", - "r ose", - "Ġpunch ed", - "Ġpres que", - "Ġastron omy", - "Ġschwier ig", - "ĠEb ola", - "Ġc is", - "Ġac et", - "ĠF X", - "end re", - "ĠìĿĮ ìķħ", - "Ġweb page", - "Ġfre aked", - "Ġlat te", - "Ġì¿ ł", - "Ġë¨ ¸ë", - "N ever", - "G ra", - "íĻĶë ¥¼", - "ey ed", - "Ġë°ľë Ŀ¼", - "Ġesper a", - "Ġapare ce", - "ra ção", - "Ġdisrupt ive", - "ĠJo int", - "ur ous", - "re as", - "Ġquer ÃŃa", - "Ġdistrib utions", - "Ġexpon ent", - "ì¹ ĺ를", - "Ġd l", - "z hou", - "ĠHe aring", - "å·® ä¸įå¤ļ", - "ĠC raw", - "Ġflo ats", - "oun ced", - "L ab", - "W orld", - "Ġbur dens", - "Ġauthor itarian", - "ĠB olt", - "Ġод нÑĥ", - "Ġpige on", - "Ġdistract ions", - "ĠHeraus forder", - "Ġz est", - "es c", - "Ġsh akes", - "at as", - "ĠÙħ Ø´", - "hol es", - "Ġthink ers", - "al ta", - "Ġar che", - "ĠS uk", - "an ha", - "Ġtempt ing", - "Ġyou tuber", - "Ġv ì", - "Ġdz iaÅĤa", - "ĠVatic an", - "P ark", - "Ġsup ers", - "ĠNik ki", - "ëĬ IJë", - "or ang", - "ram ient", - "é ¬¼", - "Ġê°ĸ ê³ł", - "Ġdessert s", - "Ġav ere", - "ĠGreg ory", - "Ġëĵ¤ìĸ´ì ĺ", - "Ġcost ing", - "ĠClin ic", - "Ġreb els", - "ĠM ob", - "Ġbun lar", - "ĠYour s", - "ert ime", - "Ġret ali", - "m ara", - "at us", - "all es", - "Ġд ÑĢ", - "Ġд иÑģ", - "Ġdiscount s", - "ĠGU Y", - "Ġкак ое", - "ĠExper iment", - "re ment", - "ĠXi ang", - "Ġb ate", - "W E", - "Ġspecial ize", - "Ġde ity", - "ĠL oki", - "m ag", - "ĠN it", - "W est", - "Ġmater nal", - "Ġqu is", - "åŁº æľ¬", - "bro ken", - "Ġlas ers", - "Ġha kk", - "ĠAng els", - "Ġmaster y", - "ant is", - "T iffany", - "ee e", - "ç ij", - "ore m", - "Ġin acc", - "Ġjurisd ictions", - "ĠKard ash", - "æľ º", - "I l", - "ĠS inn", - "åĭķ çĶ»", - "Ġathlet ics", - "c ÄĻ", - "Ġlo osely", - "Ġdiet a", - "A g", - "Ġ? 
?", - "ĠëĮĢ íijľ", - "Ġsuper v", - "Ġnut rit", - "Ġdr ifting", - "ĠìĦłìĥĿ ëĭĺ", - "Ġпон Ñıл", - "ĠVict ory", - "ÙĦ Ø©", - "×ķ׳ ×Ķ", - "Ġп иÑĪ", - "Ġsh aved", - "Ġmes ure", - "ond en", - "Ùĥ ر", - "Ġex ile", - "ĠDes de", - "ĠP interest", - "Ġattach ments", - "Ġh ombres", - "Ġfin es", - "ĠìĦ¸ ìĥģ", - "Ġsleep s", - "ĠT aco", - "ĠI RA", - "ri os", - "Ġo ll", - "et es", - "Ġun ut", - "fashion ed", - "Ġtre ball", - "ĠNear ly", - "ĠÑĢе алÑĮно", - "Ġch il", - "éĢ ±", - "ÄŁ a", - "ĠM EL", - "ros cop", - "ĠC G", - "Ġv enge", - "Ġdishwas her", - "al gic", - "Ġmod ifier", - "Ġemb assy", - "t imer", - "em ics", - "Ġintric ate", - "Ġev et", - "ĠëĮĢë °ķ", - "Ġis ot", - "Ġна ÑĥÑĩ", - "ĠQu iz", - "res o", - "δ Ïİ", - "Ġye lled", - "Ġfed er", - "ELL ER", - "Ġexceed ed", - "on as", - "ic ano", - "Ġжив оÑĤ", - "ĠMa o", - "ĠKaz uto", - "Ġ ãħĭãħĭãħĭãħĭ", - "Ġfront line", - "ĠHung arian", - "Ġüber all", - "aw at", - "Ġgri ps", - "i ções", - "arn ya", - "ĠÍ ¡", - "Ġse id", - "Ġan ak", - "Ġacab ou", - "íķ ij", - "Ġnot orious", - "ĠGod zilla", - "Ġover coming", - "ĠP end", - "Ġol abilir", - "ül me", - "Ġer halten", - "ãĤī ãģĦ", - "ê· ¹", - "ĠM eter", - "Ġsta an", - "O l", - "Ġch ats", - "ĠBu enos", - "ÃŃ ve", - "alu able", - "Ġstrateg ically", - "Ġcompr ised", - "ĠпеÑĢÑģон аж", - "Ġw ann", - "ĠC en", - "н иÑĤе", - "Ł ģ", - "ĠÑĤоб ой", - "i ad", - "ĠkardeÅŁ im", - "ĠCongress man", - "ream ing", - "h omme", - "Ġcommun aut", - "Ġalcohol ic", - "Ġpick led", - "Ġac ord", - "p osition", - "eg ól", - "Ġtrou bling", - "ĠMarch eg", - "Ġzum indest", - "Ġseam lessly", - "Ġol un", - "ĠTV s", - "ĠпÑĢакÑĤи ÑĩеÑģки", - "Ġback end", - "ãģĵãĤĵ ãģ«ãģ¡ãģ¯", - "id able", - "Ġgad get", - "Ġfa ço", - "ĠMarcheg iani", - "Ġë° ¤", - "Ġaccident al", - "ĠL P", - "Ġeld est", - "ĠAd miral", - "Ġn Äĥm", - "le ver", - "Ġpast el", - "Ġfond o", - "Con nie", - "Ġter cer", - "Ġp act", - "ĠMont e", - "Ġme ats", - "ĠS MS", - "ĠAustral ians", - "ç ¼", - "Rh ett", - "Ġexact ement", - "Ġë¹ ¼", - "ĠM OD", - "ç ¡", - "ĠR apt", - "ĠNo ch", - "Ġab ort", - "ĠNav al", - "ĠFu ji", - "IN TER", - "Ġнов Ñĭй", - "Ġmiej sce", - "ĠIC U", - "ĠGrad uate", - "ĠGl en", - "ard i", - "ĠÈ ĺ", - "Ġsold er", - "Ġprofess ions", - "Ġorth og", - "om n", - "int rodu", - "ĠDen ise", - "ìŀIJë ¥¼", - "Ġcorrespond ence", - "AM A", - "Ġinf lict", - "Ġf and", - "ĠG ü", - "ĠÑĩ еÑĤ", - "Ġtr aced", - "Ġpat ents", - "Ġamb ush", - "Ġlot ta", - "ff er", - "ĠW agner", - "Ġimp erson", - "Ġextr êmement", - "ÙĤ ت", - "cond uct", - "A tt", - "ĠM ueller", - "ĠAl icia", - "Ġcy c", - "Ġha cker", - "Ġt ys", - "Ġha il", - "Ġз аÑıв", - "Ġpas so", - "Ġì¶ Ķê°Ģ", - "ĠÎ Ī", - "Ġpack aged", - "ĠC ynthia", - "he et", - "ä¸Ń åĽ½", - "ĠNiss an", - "ĠQuest o", - "é ¨", - "d id", - "Ġμ ια", - "ĠEll is", - "ĠAnal ysis", - "ce mos", - "Ġas eg", - "ĠMy ster", - "ĠCa o", - "Ġtu v", - "ĠIndust ry", - "주 ê³ł", - "ot al", - "Ġpeque ño", - "br as", - "Ġcompreh end", - "ĠSim pson", - "ÑģÑĤв ие", - "ocr acy", - "иÑĩеÑģ ки", - "ĠM ush", - "ĠLaur ie", - "Ġtriang ular", - "ĠPres ents", - "ĠK unden", - "ç´ ¹", - "æŃ ¦", - "ĠIs s", - "ĠDe ck", - "á»ĥ n", - "ĠDark ness", - "Ġinflamm atory", - "eremi ah", - "Ġwar med", - "vey ard", - "ĠMem ory", - "et ty", - "Ġtax payers", - "ภĵ", - "Ø ¡", - "Ġpract ise", - "ëĭ ¬ë", - "Ġdr illed", - "m Ã¼ÅŁ", - "log o", - "ĠF ach", - "¤ë ¡ľ", - "Ġübrig ens", - "Ġkon nten", - "Ġnormal mente", - "Ġarg ues", - "iling ual", - "°ë ¥¼", - "eg al", - "Ġtrava ill", - "ov y", - "а ÑĤо", - "Ġr uth", - "ĠL ights", - "Ġconsist ed", - "×ijר ×Ļ×Ŀ", - "Ġstere otype", - "Ġpay er", - "ĠRe e", - "ĠAir bnb", - 
"Ġdr owned", - "ĠZ oe", - "Ġcan opy", - "Ġbar r", - "Ġн оÑĩ", - "Ġpag an", - "Ġj ars", - "Ġr ê", - "er ver", - "æĪ ¿", - "ie ben", - "Ġes pect", - "ĠF i", - "Ġunw illing", - "Ġtechn ician", - "ặ t", - "m ember", - "ĠCan al", - "س Ùħ", - "Ġlie ber", - "Ġin ference", - "Ġhon oring", - "åij µ", - "ĠCamp aign", - "Ġline age", - "ĠSt ress", - "Ġvict ories", - "Ġde ja", - "× £", - "ê tes", - "bl ick", - "Ġмен ее", - "oth s", - "ĠCou ple", - "J ason", - "ĠNic olas", - "ек Ñģ", - "l ib", - "Ġher ramient", - "Ġ×IJ ×ķ×ŀר", - "Ġвид им", - "mill imeter", - "Ġsil houette", - "Ġdrive way", - "Ġcher ish", - "ãħł ãħł", - "Ġrans om", - "Ġinter disciplinary", - "ĠPort al", - "Ġtra g", - "th ood", - "Ġted ious", - "Ġgloss y", - "Ġpré par", - "ĠC ay", - "ĠT ook", - "ĠBott om", - "Ġz ig", - "å «", - "åį ±", - "re presented", - "à¹Ģล ย", - "Ġdesar rollo", - "ìĦ ľë", - "Ġvis cos", - "Ġmill igram", - "ĠG und", - "Ġfer ment", - "d rum", - "Ġdraw ers", - "La ugh", - "Ġpel os", - "Ġpave ment", - "Ġmem oir", - "av ait", - "Ġ20 50", - "¤ë ¥¼", - "Ġraz ón", - "Ġflour ish", - "Ġst ern", - "ä¸ Ī", - "ĠCh ung", - "Ġser pent", - "ĠGentle men", - "羣çļĦ å¾Ī", - "k ook", - "Ġl ut", - "import e", - "p arent", - "Ġw sz", - "Ġsc ree", - "ĠMitar beiter", - "å· ´", - "m ut", - "Ġìĸĺ 기를", - "Ġsem ble", - "ĠO W", - "Ġinvestig ator", - "ĠCher yl", - "ĠG erald", - "Ġpr ere", - "Ġcomp ares", - "ny t", - "Ġdiferen ça", - "? -", - "Ġqu á", - "ר ×Ļ", - "S en", - "Ġhe ps", - "Ġgrat uit", - "Ġcons ort", - "ĠST OP", - "ĠProtest ant", - "Ġelectro de", - "â Ĺ", - "Ġsecure ly", - "иÑĩеÑģ кой", - "Ġt ää", - "Ġreg isters", - "ĠHeaven ly", - "og ly", - "iss ä", - "ĠPhys ics", - "ĠMer kel", - "Ġré v", - "éĻ ¢", - "Ġer ased", - "ĠSac ramento", - "Ġcoff in", - "Ġex acer", - "Ġl anz", - "Ġpo ets", - "ul if", - "Ġì¹ ĺë", - "ĠN erd", - "ĠN CT", - "ĠH our", - "neh mer", - "ŀ ĺëıĦ", - "ĠPrin ci", - "S w", - "m ies", - "ar med", - "ĠBeat les", - "Ġpropag ation", - "Ġexch anged", - "Ġcum ulative", - "Ġì§ij ìĹIJ", - "Ġdefe ating", - "æĬ ±", - "b els", - "Ġw es", - "ĠOdys sey", - "ä½ł æĥ³", - "av ior", - "ĠìľĦ ìĹIJ", - "Ġbr it", - "Ġhij o", - "D AY", - "ĠاÙĦت ÙĬ", - "ĠС еÑĢг", - "Ñĥ ка", - "eds iÄĻ", - "Ġimp os", - "Ġell as", - "Ġfire arms", - "ĠN R", - "Ġ×ij× IJ", - "ĠÐŁ ока", - "aw i", - "ĠìĦ± ê³µ", - "Ġpup ils", - "ĠT ack", - "Ġfr ase", - "ĠSh ip", - "Ġst ad", - "ä¸ ľ", - "ĠGreat er", - "un un", - "imm ung", - "gr own", - "ĠN XT", - "ĠAmeric as", - "f ox", - "Ġmant en", - "éłIJ åĤĻ", - "ĠÑģ ок", - "Ġr ikt", - "lect ric", - "de ep", - "Ġзна еÑĪÑĮ", - "Ġben ut", - "ĠInf rast", - "ĠEm ir", - "ĠоÑĤп ÑĢав", - "ĠKim chi", - "ĠFinn ish", - "´ìł ģ", - "ina ire", - "Ġo ike", - "æ¸ħ æ¥ļ", - "Ġhost age", - "ĠBut ton", - "ÙĤ ÙĬ", - "ek ing", - "ĠKaz akh", - "Ġcomfort ing", - "Ġso g", - "Ġgreet ed", - "g uitar", - "p ayer", - "Ġrel ational", - "Ġconstru ir", - "çī¹ åĪ¥", - "op ian", - "ĠVol ume", - "iet h", - "ÑģÑĤв ом", - "ur rection", - "li ÅĽmy", - "Ġhem isphere", - "ĠBe an", - "IG N", - "Ġköt ü", - "ĠFall out", - "Ġbr ace", - "ç¹¼ çºĮ", - "ÏĢ ά", - "ĠH AS", - "Ġg é", - "Ġcharacter ize", - "ặ c", - "ĠMil ky", - "Ġtum ors", - "Ġn uit", - "ĠG az", - "ĠìŀĪ ëĭ¤ëĬĶ", - "Ġг аÑĢ", - "ess ment", - "ĠA be", - "Ġë½ ij", - "ĠEins atz", - "J IN", - "j ä", - "C ry", - "ĠProm ised", - "ĠÑģеÑĢ д", - "ok us", - "Ġscal able", - "ĠпоÑģмоÑĤÑĢ еÑĤÑĮ", - "ück lich", - "Ġreal ism", - "Ġmay o", - "Ġjuven ile", - "Ġhead lights", - "Ġgör Ã¼ÅŁ", - "ĠRe form", - "Ġhal ves", - "cz ne", - "Ġbreak up", - "że j", - "Ġr ätt", - "D ay", - "ĠìĿ¼ë ³¸", - "Ġmu erte", - "Ġtun es", - "ĠSm ile", - "rec 
ord", - "Ġrecher che", - "atisf ied", - "Ġpo zi", - "Ġcelebr ations", - "ise xual", - "ĠRO B", - "third s", - "ĠF ortune", - "ĠÑĤ ой", - "Ġbrand ed", - "lo o", - "Ġd ud", - "Ġrandom ized", - "Ġcomb in", - "ä¸Ģ äºĽ", - "ier an", - "c zenia", - "į ãĥ«", - "Ġcur ator", - "Ġar tery", - "ĠÑĥ ÑĪ", - "ĠÑĩ иÑĤ", - "Ġsubsid ies", - "Ġbloss om", - "ĠTw ilight", - "Ġhy vä", - "ĠPom pe", - "ĠC isco", - "ĠÐŁÑĢ о", - "Ġbir i", - "Ġg ern", - "Ġre built", - "Ġw cze", - "Ġbenefic i", - "Ġdrum mer", - "Ġsol ids", - "Ġdi yorsun", - "ãģĤãĤĬãģĮãģ¨ãģĨãģĶãģĸ ãģĦãģ¾ãģĹãģŁ", - "l ated", - "Ġmud dy", - "Ġh olog", - "Ġcl aps", - "ĠR ings", - "ĠO key", - "ĠBra ve", - "Ġvalu ation", - "Ġmig rant", - "Ġinter mitt", - "Ġeig ene", - "ili ary", - "ãĥ¼ ãĥĪ", - "mark t", - "k r", - "ĠR ib", - "á»Ļ i", - "Ġaccus ations", - "Ġa rab", - "w ash", - "ĠBard zo", - "Ġu gh", - "est ers", - "oph ren", - "Ġaliment os", - "ĠU z", - "Ö Ĥ", - "Ġ6 50", - "ĠпÑĢи еÑħ", - "F I", - "Ġsamp ai", - "Ġparl é", - "hes ion", - "Ġs ır", - "Ġapparat us", - "Ġcor related", - "ĠPrincip al", - "Ġcor r", - "ĠOffic ial", - "иÑĩеÑģ кие", - "Ġtermin als", - "Sh ould", - "Ġvac un", - "Ġst ellt", - "Ġmo oi", - "etz ung", - "Ġк ÑĢа", - "Ġda i", - "Ġп ож", - "Te am", - "ĠP PE", - "ĠÐŀ Ñģ", - "ĠLe ah", - "ĠI vy", - "y st", - "Ġuh hh", - "Ġnight time", - "Ġtrend y", - "Ġsec urities", - "Ġcontin ents", - "Ġfirst hand", - "ĠVer on", - "ĠëĤ ®", - "Ġbrows ing", - "ĠC ada", - "t ro", - "Ġtr amp", - "re ib", - "Ġerst mal", - "irl er", - "Ġps ic", - "Ġget ir", - "ĠN P", - "Ġdzie ci", - "об ÑĢаз", - "Ġmagic ian", - "Ġscrut iny", - "Ġsl ab", - "ĠO T", - "ist y", - "ir ies", - "ore st", - "Ġtask ed", - "Ġmor ally", - "ìķ¼ ì§Ģ", - "ust ered", - "Ġfool s", - "Ġir respons", - "Ġein f", - "Ġvi á»ĩc", - "Ġsc or", - "Ġpill ows", - "ĠG egen", - "Ġtut te", - "Ġquarter ly", - "Ġdid nt", - "ĠG ym", - "ĠE ther", - "ĠØ «", - "лиÑĪ ком", - "Ġsign aling", - "ĠN ode", - "ĠDonc s", - "Ġy ah", - "ĠKan al", - "Ġf ading", - "et in", - "Ġinfluen cers", - "Ġmed als", - "Ġengine ered", - "Ġfer mented", - "ê²ł ì§Ģë§Į", - "ĠBeet hoven", - "×ŀ× ©", - "inent al", - "ĠìķĮë ł¤", - "üt fen", - "al nya", - "Ġo vere", - "Ġden kt", - "ак ÑĤеÑĢ", - "Ġâ ĺ", - "Ġneces it", - "Ġgener ators", - "gr ass", - "Ġпод Ñĥм", - "lie ÃŁen", - "B ar", - "ľë ıĻ", - "ĠдеÑĤ ей", - "Ġsuck ing", - "Ġsten cil", - "Ġprim o", - "ĠBreat h", - "st rom", - "Ġimmens ely", - "Ġapp reh", - "ìłķ ìĿ´", - "P op", - "Ġj ong", - "ĠGi ul", - "ĠAD HD", - "Ġhö ren", - "Ġe lo", - "iv ent", - "Ġr us", - "Ġoutrage ous", - "Ġmaster ed", - "Ġì» ¤", - "ÙĪ Ùģ", - "ip es", - "ĠRud y", - "Jac ob", - "Ġbull ish", - "Ġt apped", - "Ġfa ud", - "iz ophren", - "ĠÑģо Ñħ", - "ĠDar ling", - "Ġ196 3", - "ĠPre vention", - "² Ķ", - "Ġabdom inal", - "st ones", - "Ġav aient", - "á»ķ i", - "m ake", - "Ġs are", - "ĠInst ant", - "к ам", - "Ġkeep er", - "Ġblank ets", - "ãģ§ ãģĹãĤĩãģĨ", - "Ġswe ats", - "ĠMinne apolis", - "åħ¨ éĥ¨", - "Ġgen ommen", - "Ġfast en", - "ĠBrus sels", - "åij ¼", - "Ġcaf eter", - "Ġabsor bing", - "Ġha go", - "ĠEl mo", - "Ġgust o", - "ĠY ap", - "M úsica", - "Ġt ert", - "Ġband a", - "Ġm ily", - "Ġthere after", - "ĠStock holm", - "ĠC arson", - "Ġcalib ration", - "ava ÅŁ", - "ans a", - "ik ke", - "Ġfore see", - "Ġqual che", - "Ġdest e", - "æ ¤", - "ün üz", - "Ġfor ge", - "D is", - "est en", - "Ġδ ια", - "Ġenca ps", - "ĠGes pr", - "Ġcher cher", - "ick ets", - "ÑĤоÑĢ Ñĭ", - "C r", - "ĠТак же", - "Ġrabb its", - "ĠD ot", - "he iten", - "Ġcaus al", - "ĠF oster", - "ajÄħ c", - "Ġbere it", - "Ġayud ar", - "é« Ļ", - "ãģ ³", - "s ong", - "com b", - "Ġfr 
inge", - "Ġcyber security", - "Ġëľ ¨", - "Ġk ier", - "Ġbesch äft", - "Ġкон ÑĨе", - "Ġfacil it", - "ĠNam en", - "Ġbil ateral", - "t x", - "ĠW issenschaft", - "Ġnu ances", - "Ġr ipping", - "Ġf y", - "ĠSicher heit", - "ĠGh ana", - "ol on", - "Ġto pped", - "ĠMoroc co", - "Ġrad ial", - "ĠL EE", - "ĠAndre as", - "ed d", - "ĠìĹ ´ë", - "ĠAirl ines", - "ãģĵ ãĤį", - "Ġval ores", - "ê· ľ", - "H y", - "Ġзад аÑĩ", - "ĠKend all", - "ĠÑħ аÑĢ", - "ĠV amp", - "Ġpy thon", - "Ġmanage able", - "ĠG ente", - "o ise", - "ici ary", - "Ġimp oss", - "ĠBun ny", - "iest a", - "And rew", - "Ġser t", - "ĠC ec", - "zz arella", - "Ġautom obile", - "ĠT iere", - "all ows", - "åĨ Ĩ", - "Ġë° Ģ", - "ĠSc orp", - "ĠJ elly", - "ag ara", - "ĠSt retch", - "Ġrede f", - "Ġexacer b", - "ĠS HA", - "é f", - "ors a", - "Ġflaw ed", - "ĠNo el", - "?! ?", - "Ġpro cent", - "Ġmen stru", - "ĠпÑĢо Ñĩ", - "Ġinf ants", - "ðŁİ µ", - "pa use", - "ĠR acing", - "Ġ194 8", - "Ġsuper intendent", - "id ores", - "id y", - "bra him", - "Ġunl ucky", - "Ġper k", - "an ci", - "Ġë§Įë Ĥĺ", - "ĠÐľÐ¾Ñģ кв", - "Ġfin ans", - "Ġdiferen cia", - "łĪ ìĿ´", - "éħ į", - "OR Y", - "ĠT ac", - "ÛĮ ا", - "Ġdes em", - "Ġваж но", - "ĠJ U", - "ĠìŀĪ ìŀĸìķĦìļĶ", - "ĠÎ Ŀ", - "Ġinform ations", - "ĠH EL", - "h st", - "Ġпог овоÑĢ", - "Ġvo iture", - "Ġre us", - "änd ig", - "ĠпоÑħ ож", - "j ing", - "Ġd ru", - "alt ra", - "Ġprodu its", - "Ġk ite", - "Ġeye ball", - "ĠB elt", - "ĠRestaur ant", - "Ġg amb", - "Ġpor ridge", - "it ters", - "Ġconver ts", - "Ġyard ım", - "Ġmáxim o", - "w irtschaft", - "Ġíķĺë Ĥĺë", - "Ġì¤ Ģ", - "Ġice berg", - "Ġvor bei", - "Ġ25 6", - "ocr atic", - "Ġreck less", - "on ner", - "Ġm ús", - "Ġlog ically", - "ĠPr ison", - "ĠNet z", - "Ġvac ant", - "Ġn immt", - "ĠH ARR", - "Ġз ов", - "ĠDe e", - "ring e", - "ni est", - "ĠR ules", - "ìĬ¤ë Ł½", - "cuss ions", - "Ġfl oral", - "Ġconstra ined", - "Ġdifferent iation", - "ĠQue bec", - "ĠÛģ ÛĮÚº", - "Ġpúblic a", - "it el", - "Ġaccommod ations", - "ĠGr ü", - "í ľ", - "Ġpick les", - "иÑĩеÑģ киÑħ", - "Ġcomm issions", - "ĠBa ek", - "Ġçoc uÄŁ", - "ĠMed ium", - "Ġperiod ically", - "Ġwonder fully", - "Ġstaff ing", - "ìĽ IJë", - "ri re", - "f le", - "ĠMc L", - "ĠÑĤ еп", - "ĠпеÑĢ ек", - "н олог", - "Ġíģ¬ ê²Į", - "çĻ¼ çı¾", - "Ġprosper ous", - "ĠSpirit ual", - "ĠCh ick", - "DI A", - "ĠÐŁÑĢ ивеÑĤ", - "Ġper ÃŃ", - "ÑĮ ÑİÑĤ", - "Ġconsult ants", - "ĠEar l", - "ä»Ĭ å¹´", - "Ġru ining", - "оÑĢ е", - "Ġpens er", - "Ġtak iej", - "Ġstrength ened", - "ĠLiqu id", - "он еÑĨ", - "ав аÑĤÑĮ", - "Ġcam er", - "Ġdisagre ement", - "Ġbat hing", - "ĠY osh", - "a al", - "pre chen", - "RIS ADAS", - "Ġsuper star", - "æģ Ń", - "лÑı ÑĤÑĮ", - "Ġn ib", - "ĠTh erm", - "ĠDAN IEL", - "Ġp aw", - "Ġliqu ids", - "Ġcapac it", - "ark en", - "Ġvag ina", - "Ġm ashed", - "Ġemer ges", - "ys cy", - "Ġun related", - "ĠGu ild", - "Ġin verted", - "it ives", - "T ra", - "Ġbe gr", - "Ġal te", - "ì§ ķ", - "ãĤģ ãģ¦", - "ĠÑĢазÑĢ абоÑĤ", - "f inder", - "Ġдал ее", - "Ġблаг одаÑĢ", - "walk er", - "Ġcr ater", - "ass adors", - "ren ces", - "ins ki", - "ĠK IM", - "ĠEll iot", - "20 17", - "ĠS r", - "ink a", - "ano v", - "Ġìŀĺë ª»", - "Ġpropriet ary", - "display style", - "ĠÑģ им", - "Ġиз б", - "ĠPan el", - "Ġinstinct s", - "ĠCommun ications", - "éº »", - "mid t", - "Ġë§Įëĵ¤ ìĸ´", - "ĠÑģл ова", - "ĠGil bert", - "缮 åīį", - "Т ак", - "voor beeld", - "е ÑİÑģÑĮ", - "ary n", - "que z", - "Ġd art", - "Ñĸ ÑĪ", - "ĠH ut", - "S al", - "Ġs outheast", - "Ġpestic ides", - "Ġhelicop ters", - "Ġend ured", - "i ada", - "Ġbre wing", - "ìĹ ¬ë", - "ĠÑģв обод", - "ĠS aints", - "ĠFr ançais", - "ĠEconom ics", - 
"Ġdis loc", - "oph obia", - "C amer", - "Ġnegoti ated", - "ĠÑģÑĤ али", - "ìĬ¤í ģ", - "og ie", - "Ġtsun ami", - "Ġpeel ed", - "Ġmotiv ations", - "è¨ Ń", - "ost at", - "fl an", - "ĠD AC", - "Ġk av", - "' RE", - "ĠPe arson", - "b be", - "c zenie", - "Ġaten ção", - "íĨµ ëł¹", - "ãģ£ ãģ¡", - "ĠÑĥд аÑĢ", - "Ġintrodu ctory", - "ĠI ci", - "ë ĮĢë", - "ak at", - "Ġt rench", - "Ġproceed ed", - "ĠCo in", - "Ġdere cho", - "ĠRed e", - "æ¯ Ľ", - "ан нÑĭй", - "Ġincarcer ated", - "ĠRich mond", - "R ock", - "ĠP av", - "ĠKar ma", - "ug es", - "Ġconte ú", - "ë ¹Ħ", - "Ġê·¸ë §Į", - "ĠG one", - "Ġwsp óÅĤ", - "ĠRah men", - "un ken", - "Ġì¤ijìļĶ íķľ", - "Ġi b", - "Ġatt aching", - "H ay", - "Ġsu ka", - "ìį ¹", - "Ġpivot al", - "ĠRes pect", - "ÃŃ da", - "I B", - "ĠVer antwort", - "w iet", - "Ġforens ic", - "ÑĢи ÑģÑĤ", - "ĠпÑĢинÑĨип е", - "Ġmark ings", - "Ġk ettle", - "ĠOper a", - "ĠDo ctors", - "Ġshred ded", - "Ġrec uer", - "Ġvig il", - "ĠF ail", - "Ġentre v", - "Ġд ÑĥÑĪ", - "Ġout breaks", - "èµ° åIJ§", - "ĠÏĢ ο", - "Ġro gue", - "ang led", - "Ġyear ly", - "ĠCre ed", - "Ġw am", - "Ġlot us", - "ê³ ¼ë", - "ãĢģ ãĢģ", - "ĠSp it", - "ĠIt u", - "Ġstra ins", - "Ġstamp ed", - "Ġpl aint", - "Ġpot ion", - "Ġconsolid ation", - "è© ķ", - "оÑĩ кÑĥ", - "Ġvlog ging", - "Ġsl ate", - "ĠAu ft", - "ĠInc or", - "ừ ng", - "§ IJ", - "en h", - "Ġhe iÃŁ", - "Ġdom est", - "ĠSt rom", - "åį ³", - "ak is", - "Ġfra gen", - "Ġfin er", - "ĠS ug", - "Ġup hill", - "Ġé én", - "âĢ¦ )", - "ĠÑģ оп", - "ĠCore y", - "Ġsie bie", - "Ġm use", - "Ġclo ves", - "Ġp ous", - "ĠFin anz", - "ĠR oute", - "am at", - "Ġmut ually", - "ĠвнÑĥÑĤ ÑĢи", - "ĠSel ena", - "ë Ķ", - "ĠGa ussian", - "ë ¶ĢíĦ°", - "Ġ×ij× Ľ", - "Ġej erc", - "å¾ ®", - "ke a", - "ĠG erry", - "ĠS ic", - "大 çļĦ", - "Ġ196 6", - "ies e", - "Ġfoss ils", - "Ġest ad", - "ĠK ane", - "ci Äĩ", - "Ġìľł íĬľë", - "Ġп ам", - "ĠCru ise", - "int érieur", - "Ġbe kannt", - "ĠP ode", - "Ġdem ander", - "R em", - "Ġinv ade", - "Ġdecor ating", - "rop ic", - "Ġcow boy", - "ĠPh oto", - "opol it", - "Ġì»¬ë Ł¬ë", - "Ġre ap", - "Ġhand writing", - "à¹Ħ ร", - "Ġë ļ", - "Ġب عد", - "ĠM t", - "Ù Ģ", - "Ġspaces hip", - "Ġnational ism", - "Ġcouncil s", - "ĠGriff in", - "ĠAh med", - "Ġcl ich", - "ĠO L", - "w l", - "ĠPil ot", - "å® ®", - "Ġacron ym", - "Ġg els", - "Ġelectro ly", - "è ĵ", - "Ġм ной", - "Ġepis od", - "ĠDies es", - "ĠAT P", - "Ġed iyorum", - "Ġexpress es", - "Ġexhib its", - "C omm", - "Ġк ÑĢÑĥп", - "Ġmat ar", - "Ġ20 25", - "ĠArt em", - "vas ive", - "r Ãł", - "Ġbe ÅŁ", - "é» ĥ", - "Ġliz ard", - "Ġfill e", - "Ġì§ Ī문", - "Ġмо Ñī", - "Ġt ür", - "Ġcul prit", - "Ġwo ven", - "ĠAN Y", - "n im", - "Ġt ay", - "Ġprom in", - "Ġacom pa", - "Ġid é", - "Ġbo iler", - "ĠThe men", - "Ġaven ue", - "ĠM ud", - "Ġнов Ñĭе", - "Ġwitness ing", - "Ġl ance", - "ĠCH AN", - "ĠBe ver", - "ت Ùħ", - "Ġchem otherapy", - "K ing", - "ĠbÄĻd ÄĻ", - "Ġat ual", - "Ġt ive", - "Ġtalk in", - "Ġqued ar", - "ie ÃŁ", - "ed el", - "Ġìĸ´ì łľ", - "Ġjog ar", - "Ġö r", - "Ġundert aking", - "ĠStre ngth", - "Ġmil hões", - "ĠW ine", - "ĠM olt", - "è® ²", - "ãģij ãĤĮ", - "Ġunderm ine", - "ĠArch ives", - "v ana", - "mer cial", - "M C", - "Ġcast e", - "п ÑĢ", - "Ġlegisl ators", - "ul ators", - "ên io", - "Ġëį °ë", - "ĠÑħоÑĤ иÑĤе", - "Ġн ек", - "Ġs urn", - "Ġcons ci", - "ĠP OW", - "Ġcul inary", - "ĠK AT", - "ĠFol ks", - "Ñĭв аем", - "Ġв ок", - "ãģij ãĤĭ", - "s ervice", - "pt s", - "Ġпоб ед", - "æĺ¯ åķĬ", - "Ġt ents", - "Ġn ord", - "ST E", - "Ġrepublic an", - "Ġwy k", - "Ġmin ions", - "èĻ ķ", - "Ġmem ang", - "j est", - "Ġcompar ative", - "Ġty le", - "car bon", - "bed ingt", - 
"ks en", - "Ġneg ativity", - "Ġsjäl v", - "Ġd ú", - "æīĢ æľī", - "Ġrec alled", - "c ra", - "ĠT ada", - "ĠÑĢÑĥ ки", - "ĠопÑĢед ел", - "Ġproc rast", - "Ġjog os", - "ĠO o", - "ĠHe arts", - "Ġé ch", - "Ġksi Äħż", - "Ġco arse", - "ĠT ube", - "ĠG reens", - "Ġé n", - "Ġdumb bell", - "ĠÑĤ и", - "Ġquer er", - "ا ØŃ", - "Ïĥ ει", - "ĠпÑĢав илÑĮно", - "Ġп ап", - "Ġcomp ra", - "Ġt ér", - "ĠAnt es", - "Ġoptim um", - "Ġbisc uit", - "κ ι", - "acz ego", - "Ġìĭľê°Ħ ìĿ´", - "ĠMar ines", - "ver o", - "Ġvacc inations", - "Ġpet ty", - "rit ers", - "Ġа л", - "count ry", - "Ġcoun ters", - "Ġattend ant", - "ĠH ui", - "ãģ¨ãģĦãģĨãģĵãģ¨ ãģ§", - "ck a", - "ÑģÑĤвен нÑĭй", - "gu y", - "Ġtrick ed", - "ĠR ED", - "Ġthr illing", - "ÏĢο ι", - "Ġpig gy", - "Ġan unci", - "OR TER", - "ĠVal ue", - "Ġr ond", - "ĠA DA", - "Ġpos er", - "h ores", - "ĠR oland", - "ĵ ¯", - "Ġno ir", - "Ġש ×IJ×", - "ë° ľ", - "iem and", - "ĠпоÑĤ еÑĢ", - "ê³ ³", - "Ġê± ±", - "Ġformat ting", - "ĠL ed", - "è§Ģ çľ¾", - "Ġkill ers", - "ĠÄij ấy", - "Ġha ar", - "ag ain", - "! > [", - "min ster", - "Ġв ли", - "Ġident ifier", - "ĠLamb da", - "Ġtr os", - "Ġflaw less", - "Ġdetriment al", - "Ġbun ları", - "W ar", - "Ġreg ião", - "羣çļĦ æĺ¯", - "ĠB ike", - "cess ors", - "Ġc ùng", - "ĠR N", - "Ġê½ ĥ", - "Ġküç ük", - "ĠBegin ning", - "íĺ ¸ë", - "Ġge we", - "Ġden ote", - "ĠAlber to", - "Ġprob iot", - "Ġo de", - "Ġmol ar", - "Ġburst ing", - "ass umed", - "Ġfoot prints", - "ved a", - "Ġstero ids", - "Ġfl aming", - "ĠE ller", - "Ġerk ennen", - "ät zen", - "Ġlife cycle", - "ĠD OU", - "ĠK arena", - "ĠGuer ra", - "è¿ĺ æĺ¯", - "Ġsin ister", - "Ġpod éis", - "Ġpar ab", - "Ġok o", - "Ġmat éri", - "Ġcar ic", - "son aro", - "Ġpratic amente", - "ÑĥÑģ а", - "Ġcomun que", - "Ġvig ilant", - "Ġreg imes", - "ĠShoot ing", - "Ġra ids", - "ĠN ora", - "ĠW ieder", - "m ens", - "ĠÑģ од", - "Ġê²½ìļ° ìĹIJëĬĶ", - "Ġв Ñħод", - "Ġaut obi", - "ĠS chn", - "ĠRob bie", - "ĠF itness", - "Ġкон ÑĦ", - "Ġpeng uin", - "моÑĤÑĢ Ñı", - "Ġми ним", - "play s", - "Ġdeleg ates", - "M er", - "Ġsist em", - "ĠMicha els", - "m ale", - "ا ع", - "Ġcá ch", - "ĠH ä", - "Ġ×Ļ ×ķ×ĵ×¢", - "Ġsuper power", - "Ġstr on", - "Ġro ver", - "Ġdé pend", - "éĻ ³", - "Ġret iring", - "Ġvamp ires", - "Ġmer de", - "ĠCh anging", - "Ġt ame", - "Ġspokes person", - "Ġc ay", - "Ġfl irting", - "ĠGr ö", - "Ġw är", - "Ġwy b", - "Ġcoe ur", - "ạ nh", - "ĠìĻĢ ìĦľ", - "Ġconna is", - "ĠHundred s", - "ĠBe a", - "Ġα ÏĢ", - "pr uch", - "Ġsocied ade", - "ĠWh ilst", - "ĠK ait", - "esp ace", - "Ġch ia", - "ĠEr m", - "Ġë°Ķ ê¿", - "Ġf ences", - "ĠM ortal", - "ê² ģ", - "Ġг ÑĢаÑĦ", - "ĠHom eland", - "ĠJ UN", - "is st", - "Ġpar lar", - "Ġsport y", - "é o", - "Ġdeep en", - "ĠBeh avior", - "éĢ ı", - "åĵĪåĵĪ åĵĪ", - "Ġer rand", - "Ġrot ary", - "ĠWell ington", - "W ind", - "Ġmes ela", - "ả ng", - "iend e", - "Ġex cell", - "ĠGen ius", - "ĠEdu ardo", - "æľī 人", - "ĠÅŁ unu", - "ĠÄ° stanbul", - "Ġprod uto", - "Ġ ãħİãħİ", - "O FF", - "Ġwoll t", - "çĪ Ĩ", - "Ġëī´ì Ĭ¤", - "Ġl ass", - "Ġher tz", - "Ġar omatic", - "Ġзв он", - "Ġaut oc", - "ĠL ust", - "Ġ11 2", - "ĠÎ Ĺ", - "Ġreview ers", - "Ġrecept ive", - "å°į äºĨ", - "â nd", - "og lo", - "ĠìķĦëĭ Ļ", - "Ġn go", - "Ñĸ ÑĤи", - "Ã¥ t", - "con o", - "Ġtek rar", - "Ġ주 ê³ł", - "Ġgel miÅŁ", - "Ġbed time", - "ĠAr gh", - "AD A", - "ĠгоÑĢод а", - "ĠÄ ĩ", - "Ġall iances", - "g iggling", - "Ġyer de", - "Ġsp ies", - "Ġg utes", - "ç i", - "Ġallt id", - "ĠL ah", - "ŀ IJë", - "Ġdo kÅĤad", - "ÙĪ ÙĬ", - "Ġtoxic ity", - "Ġcancell ation", - "Ġ195 8", - "d ro", - "Ġìŀij ìĿĢ", - "ĠMotor ola", - "Ġmult in", - "Ġenthusi asts", - "ĠM ighty", - "ĠCoc 
onut", - ": ãĢĮ", - "ĠPict ures", - "Ġsang re", - "Ġbl inking", - "ol esome", - "ĠìĬ¤íĥĢ ìĿ¼", - "F P", - "Ġboom ing", - "ĠдеÑģÑı ÑĤ", - "Ġr atchet", - "Ġtim elines", - "len ess", - "Ġc ages", - "ĠGood night", - "omet imes", - "Ġc unning", - "ĠR isk", - "ul ed", - "d ade", - "Ġpr ata", - "Ġgust arÃŃa", - "am us", - "ĠJin ping", - "Ġest rut", - "Ġdescob rir", - "ĠM Äģ", - "ĠAll an", - "Ġ åĪĨ", - "Ġ×ľ× §", - "Ġpres erv", - "ĠStraw berry", - "Ä ı", - "L u", - "Ġk ro", - "ĠRep orts", - "ìħĶ ìķ¼", - "Ġval t", - "Ġpouv ait", - "Ġapp ar", - "ĠB one", - "Ġprefer ably", - "ĠRep ública", - "å°± åĪ°", - "Ġher zlich", - "Ġchim ney", - "Ġç ev", - "Ġvis as", - "Ġver r", - "Ġcultiv ation", - "ĠArmen ia", - "Ġвд ÑĢÑĥг", - "Ġcock ro", - "retch ed", - "art z", - "ĠлÑİд Ñıм", - "ĠpolÃŃt icas", - "ĠP anz", - "ĠA KA", - "ĠëĪ Į룬", - "Ġer ro", - "Ġcam per", - "Ġ10 2", - "ठ¸", - "d one", - "Ġho ard", - "ĠÐŁÐ¾ÑĤ ом", - "je ong", - "Ġdest a", - "p ak", - "Ġin im", - "Ġgrow ers", - "ĠMess age", - "Ġele ctor", - "eng age", - "ĠFor bes", - "ĠCincinn ati", - "Ġdiffé rence", - "d f", - "Ġsp ar", - "Ġawait s", - "ĠUSS R", - "ĠR ising", - "ĠHo ÅŁ", - "Ġfoot ing", - "Ġcond iciones", - "ÑĤоÑĢ ов", - "Ġclin ician", - "ĠDisk uss", - "å£ ĵ", - "ר ×Ĵ", - "× ¥", - "ite it", - "g ren", - "Ġchar isma", - "Ġle uke", - "Ġirrit ating", - "Ġcir ca", - "ĠRhod es", - "Ġp ior", - "Ġhandic ap", - "roy able", - "Ġv ull", - "O G", - "Ġin ÃŃcio", - "ier i", - "Ġspl ashing", - "Ġdem ise", - "Ġassist ir", - "Ñĩ ÑĤо", - "Ġcover t", - "ĠG ud", - "ภī", - "kl är", - "ĠìŀIJ 꾸", - "Ġver ändert", - "ĠR EM", - "ĠCon ven", - "at ge", - "Ġpierws ze", - "Ġcler gy", - "ling ton", - "l iv", - "V PN", - "ĠÑģ ожал", - "ĠH ate", - "ãģ¨ ãģĵãĤį", - "ÏĨ ο", - "ĠResp ons", - "оз д", - "Ġet mek", - "Ġchem in", - "Ùħ Ø©", - "Ġê°Ģ 족", - "T re", - "Ġum as", - "ĠBur ton", - "Ġpatri arch", - "ĠSmithson ian", - "¥ ĺ", - "M oon", - "A ir", - "Ġmed ios", - "Ġer aser", - "Ġwoll ten", - "Ġpare il", - "ĠBill ie", - "æĬ ½", - "еÑĢÑĤ в", - "Ġparl ament", - "Ġag ony", - "ĠQU E", - "sequ ently", - "An other", - "ĠWh ew", - "ĠAnn ual", - "Ġse ben", - "ìĥģ ìĿĦ", - "val ues", - "ŀľë §Į", - "Ġsin on", - "ere al", - "ĠEn light", - "ĠChem istry", - "ĠCatal unya", - "Ġdoct r", - "ant on", - "Ġst uk", - "ĠPl ate", - "ĠKardash ian", - "Ġfil os", - "ĠW et", - "Ġпоп ÑĭÑĤ", - "Ġunknown s", - "ĠSch on", - "ĠBald win", - "Ġtelescop es", - "ĠG ucci", - "ox ide", - "ĠConserv ative", - "ìĦ± ìĿĦ", - "Ġhina us", - "P ower", - "Ġê±´ ê°ķ", - "Ġprev ail", - "orm an", - "m achine", - "Ġ194 6", - "Ġun bel", - "Ġsch aut", - "Ġp iel", - "e enth", - "Ġobject ively", - "Ġch akra", - "aud io", - "Ġch icos", - "ĠV ault", - "å° Ī", - "Ġmedic inal", - "ĠT ail", - "Wh ile", - "Ġas phalt", - "Ġfro ze", - "ĠE K", - "unch ing", - "n osis", - "20 15", - "ĠG ri", - "Ġodd ly", - "ĠM är", - "ĠA eg", - "c olo", - "P ar", - "Ġëĵ¤ ìĸ´ë", - "Ġv inden", - "ĠO VER", - "Ġ iced", - "Ġsc orp", - "Ġha c", - "qual ified", - "ĠÑĥвид еÑĤÑĮ", - "erm o", - "H EN", - "Ġso i", - "Ġmulti ples", - "Ġlay outs", - "Ġblind ness", - "ĠB owser", - "Ġпод ÑĤ", - "Ġà İ", - "vention al", - "Ġm ata", - "mad ı", - "Ġge ez", - "Ġcad ence", - "Ġważ ne", - "ĠChrist ie", - "ven ge", - "C all", - "Ġturn around", - "Ġblo b", - "ĠЯ к", - "ĠVoice over", - "Ġper il", - "ĠJa ime", - "ĠH OY", - "l ane", - "Ġse bel", - "ĠDu o", - "ĠHistor ical", - "Ġd ni", - "Ġg ema", - "y k", - "Ġsab em", - "ắ ng", - "Ġv ars", - "ĠRon nie", - "ĠRon aldo", - "ĠPer què", - "ns inn", - "h air", - "Ġrelent less", - "Ġl yn", - "Ġtravel er", - "æĢİ麼 äºĨ", - "n ine", - "Ġant im", - 
"Ġì¼ Ģ", - "Ġsnow ball", - "ĠÑħаÑĢ акÑĤеÑĢ", - "Ġintern s", - "Ġconstitu ency", - "ĠÐĿ ам", - "׾ ׾", - "V EL", - "Ġvikt igt", - "Ġap oyo", - "ÙĦ ب", - "Ġj ard", - "Ġheight ened", - "ÑĢо ÑģÑĤ", - "ĠSM ITH", - "Ġдел а", - "Ġrepair ing", - "Ġr igt", - "ĠShe ikh", - "ĠBrit ney", - "Ġevery time", - "Ġadvent urous", - "oc key", - "er nt", - "Ġat aque", - "ĠAltern atively", - "e ffect", - "Ġpalav ras", - "ĠElli ott", - "Ġréuss i", - "Ġhypert ension", - "ĠMan ual", - "Ġproph etic", - "Ġhand c", - "ÑĮ е", - "Ġref rain", - "ĠSqu id", - "ìŀ ¡", - "Ġком ан", - "äll en", - "Ġlleg ó", - "Ġbas h", - "ion y", - "ĠÑģк лад", - "Ġк аб", - "Ġcare less", - "ĠP ool", - "Ġtr ás", - "Ġfil s", - "ĠSch r", - "Ġsp rawd", - "ĠMon aten", - "Ġunfor gettable", - "ĠCott on", - "Ġinconven ient", - "ĠR X", - "or is", - "Ġhum bled", - "ת ×Ĺ", - "ĠØ¢ Ù¾", - "Ġincre ÃŃ", - "ĠKomment are", - "èĪ Ĵ", - "r ación", - "Ġv antage", - "ĠSe al", - "ĠìĿ´ 거를", - "Ġjou e", - "ãģĿãģĨ ãģ§ãģĻãģŃ", - "Ġìĺ¤ë ŀĺ", - "ĠиÑģп ÑĭÑĤ", - "ob en", - "Ġgr ate", - "Ġcontro le", - "ĠPer cy", - "ÅĤ ada", - "Ġsimult aneous", - "Ġprot oty", - "ĠgroÃŁ er", - "Ġbew usst", - "iniz i", - "Ġpass ieren", - "ĠHapp iness", - "åī ĩ", - "sh i", - "ge ht", - "Ġstation ed", - "ĠErgeb nis", - "Ġdirect amente", - "Ġsurv ives", - "Ġperson es", - "BER G", - "Ġvom iting", - "Ġconhe cer", - "Ġad jour", - "ĠCiv ic", - "pe i", - "bur st", - "Ġëĭ¤ ëĭĪ", - "é ı", - "Ġsl ed", - "Ġplataform a", - "ĠS ect", - "ĠDe fin", - "çĻ» éĮ²", - "én om", - "chn et", - "Ġprofit ability", - "Ġerre icht", - "á»ı i", - "c ation", - "Ġì§Ģ ê¸", - "Ġperd re", - "Ġfel ony", - "Ġ195 7", - "æĪij å¾Ī", - "Ġunsuccess ful", - "Ġnag yon", - "Ġelastic ity", - "Ġfac ade", - "Ġearth ly", - "ĠамеÑĢик ан", - "Ġcon n", - "c la", - "D u", - "Ġpolit iques", - "Ġhal o", - "iant es", - "Ġмо ей", - "ãĥ³ ãĥī", - "ton es", - "el ier", - "è® ļ", - "ht aking", - "Ġwicht ige", - "Ġan no", - "ĠL ok", - "ill ions", - "Ġv iver", - "Ġsol chen", - "Ġsu f", - "ĠSal z", - "ĠN vidia", - "z uge", - "ĠSp ike", - "V ideo", - "Ġtw or", - "ĠA la", - "èij ī", - "Ġh anya", - "ĠAd m", - "ìĿ µ", - "ĠPatient en", - "ĠOn ion", - "ĠKo be", - "ĠSc ene", - "ĠR ash", - "æ¨ Ļ", - "ÑĢа ÑģÑĤ", - "ist ani", - "Gen eral", - "le ye", - "imb ap", - "Ġconce aled", - "ĠFr idays", - "ĠW ool", - "Ġнов ÑĭÑħ", - "Ø´ ر", - "Ġê²° ê³¼", - "Ġjed och", - "´ìĭ ľ", - "ĵ¤ ëıĦ", - "Ġìŀ¥ ëĤľ", - "uk t", - "L ou", - "Ġ먹 ìĸ´", - "ĠEx pect", - "Ġдом ой", - "Ġirrespons ible", - "Ġac erca", - "ĠZ ust", - "ר ×ĺ", - "U I", - "Ġyout ubers", - "ĠPos itive", - "Ġsoci oe", - "Ġsn atch", - "èĥ Į", - "Ġrefresh ed", - "Ġnom inations", - "ĠP att", - "Ġobsol ete", - "Ġdem iÅŁ", - "åı ¤", - "orm uÅŁ", - "ĠìĨĶì§ģ íŀĪ", - "Ġf la", - "Ġcra ziest", - "ĠZ ie", - "ĠT ú", - "z ep", - "ic em", - "Ġë©ĭ ìŀĪ", - "Ġcyn ical", - "ãģĿ ãĤĵãģª", - "Ġt resp", - "Ġcra z", - "Õ¥ Õ", - "Ġne lle", - "Ġm ph", - "ĠN ered", - "ĠK ob", - "ĠE ck", - "¨¸ ëĭĪ", - "J an", - "ĠТ огда", - "Ġde ci", - "ĠV og", - "Ġbubb ling", - "éĢ Ģ", - "ú a", - "Ġproduct os", - "iber al", - "Ġrepl icated", - "ĠImp rove", - "ill ary", - "C ha", - "Ġré du", - "ĥIJ íķĺë©´", - "Ġcon not", - "ĠK rit", - "ĠдÑĥÑħ ов", - "Ġtread mill", - "ĠP W", - "Ġзов ÑĥÑĤ", - "Ġcl ams", - "Ġdra fting", - "Ġ195 6", - "un ta", - "Ġexpend itures", - "ĠHoo ver", - "W OO", - "ÑĪе е", - "Ġded uction", - "mon ary", - "Ġreci b", - "Ġpo vo", - "Ġëį Ķë", - "ĠP AL", - "ĠBl ow", - "Ġwy p", - "Ġdest ac", - "de al", - "Gra eme", - "Ġnécess aire", - "Ġdamn ed", - "Ġ19 38", - "Ġìĭ¤ ìłľë¡ľ", - "Ġtro op", - "Ġinsight ful", - "ĠT J", - "ĠоÑģ в", - "Ġf idelity", - "ĠSk 
ip", - "ĠMay o", - "ë§ Ŀ", - "app e", - "Ġbl as", - "ĠW Y", - "ĠG N", - "ct ar", - "S u", - "Ġcu ent", - "he ws", - "Ġcorps es", - "A bs", - "Ġwaste water", - "Ġc iek", - "ĠOn u", - "Ġexplos ives", - "Ġar ma", - "ĠSTEP HAN", - "polit ik", - "ĠOs aka", - "ta ÅĤ", - "Ġyap ıyor", - "Ġiz quier", - "Ġbele za", - "ĠWy att", - "åIJ ¸", - "Ġsu k", - "Ġspec jal", - "Ġdan ke", - "wh istle", - "ĠfÃŃs ica", - "ĠHar riet", - "ĠìķĦ íĮĮ", - "Ġwill kommen", - "ip ing", - "ĠÑģмоÑĤÑĢ иÑĤе", - "Ġмож еÑĪÑĮ", - "Ġinacc urate", - "Ġarrog ance", - "ĠRem o", - "γ ά", - "ass ed", - "Ġdeliver ies", - "Ġst inky", - "ĠпеÑĢ еж", - "j ay", - "Ġtrans itional", - "Ġr ere", - "ĠNGO s", - "ĠAT M", - "Ø® ت", - "i ology", - "Ġв лад", - "Ġsch me", - "ĠSh ine", - "ìķ ¡", - "p ants", - "Ġser ge", - "Ġsen hor", - "Ġab duct", - "ĠBry ant", - "V ES", - "Ġawak ened", - "ĠL az", - "rop olis", - "ĠLa o", - "è¾Ľ èĭ¦", - "Ġvill a", - "Ġsumm ers", - "Ġent hal", - "Ġ194 9", - "V ia", - "Ġìĸ´ì ¨", - "Ġtend on", - "Ġviol et", - "Ġintellect ually", - "Ġboun ced", - "ara us", - "Ġ19 19", - "Ġvra ag", - "Ġsp el", - "ĠSch war", - "Sc ott", - "ĠInd o", - "Ġë§ Ŀ", - "Ġcanon ical", - "ĠI KE", - "Ġthat ÃŃs", - "Ġme llan", - "æ¯ Ĵ", - "ig mat", - "C ould", - "... ?)", - "Ġfo arte", - "ĠKum ar", - "rend o", - "Ġél é", - "à ´", - "val uation", - "c ases", - "Ġintuit ively", - "h ong", - "ett ed", - "Ġsou ven", - "Ġmor b", - "Ġc ors", - "ĠN V", - "ĠHas an", - "æĥħ åĨµ", - "ie ved", - "Ġì§Ģê¸Ī ìĿĢ", - "Ġdum pling", - "Ġcontr ôle", - "Ġambigu ity", - "æ©Ł æľĥ", - "Ġco g", - "ĠScript ures", - "Ġc ai", - "Ġbe ver", - "大家 éĥ½", - "Ġhu is", - "Ġa ime", - "Ġerkl ären", - "ĠL M", - "ĠF ey", - "éļ ¾", - "à®± த", - "Ġsuper vised", - "Ġje we", - "s pl", - "ĠÑĨенÑĤ ÑĢ", - "Ġcoll isions", - "ÙĦ Ùģ", - "ĠHog warts", - "ĠDur ham", - "×ķ× £", - "Ġphosph ate", - "Ġoverse e", - "Ġinspect ions", - "Ġbr inc", - "ĠZ ak", - "Ġpay off", - "Ġch aud", - "ĠHung er", - "ã os", - "v ir", - "Ġf iance", - "Ġb oug", - "l ived", - "c ry", - "åĽŀ ä¾Ĩ", - "Ġjoint ly", - "Ġgirl friends", - "ĠNe xus", - "¦¬ ê²łìĬµëĭĪëĭ¤", - "ĠK wang", - "åĵĪ åĽī", - "å§ ij", - "ÅĤ ÄĻ", - "ĠN eden", - "ie ce", - "Ġins erting", - "æŁ ĵ", - "ĠM ummy", - "ĠGlo be", - "Ġle e", - "Ġg erman", - "Ġcre ams", - "ach o", - "Ġch Æ°a", - "ĠGal ile", - "Ġfür s", - "Ġest iver", - "c idos", - "Christ ian", - "Ġlors qu", - "Ġcut est", - "v ale", - "ĠкÑĢ еп", - "Ġw ary", - "Ġslic ing", - "Ġesper ando", - "ĠV ander", - "ĠDe ixa", - "Ġ195 4", - "Ġmów iÄħ", - "Ñĸ ÑĶ", - "Ġtool ing", - "Ġrest or", - "Ġpos ición", - "Ġintent ar", - "ĠAp ache", - "OU L", - "ĠÙĪ ب", - "Ġmat ière", - "ãĥ¼ ãĤĵ", - "Ġl inen", - "Ġestrat ég", - "ĠMut ta", - "é¡ ¯", - "è¡Į äºĨ", - "Ġpart ing", - "Ġminim izing", - "Ġapp rendre", - "æľ Ŀ", - "Ġан глий", - "ĠDo o", - "ĠFire fox", - "c ómo", - "Ġge opolit", - "Ġmak an", - "Ġmog elijk", - "ĠÏĢε Ïģι", - "Ġcá» ©", - "Ġinstall er", - "Ġdib uj", - "ĠHe ath", - "lo op", - "ĠBro ken", - "HY UN", - "sh elf", - "Ġf izer", - "Ġenh ances", - "ä¾ĭ ãģĪãģ°", - "Ġдо ÑģÑĤи", - "ĠP UB", - "ĠKolleg in", - "Ġatt ained", - "Ä ¾", - "Ġmist ress", - "ĠOft entimes", - "×ŀ ×Ļ×Ŀ", - "Ġbe we", - "ĠS ora", - "ra uen", - "ba um", - "Ġroll ers", - "Ġm ering", - "ĠP AC", - "Ġн Ñĸ", - "ĠRép ublique", - "ĠÑĤ ÑĢав", - "ĠV anguard", - "uc iones", - "Ġ무ë ĮĢ", - "Ġg our", - "¯ ¤", - "ĠÏ ī", - "Ġsa una", - "Ġpe ine", - "ĠVal erie", - "ĠS ikh", - "fend imiz", - "ber o", - "ĠÑĩ и", - "Ġdo ÅĽwiad", - "ĠE uros", - "Ġcomment aires", - "Ġtwe aks", - "ĠF aster", - "ĠÑĢаÑģ к", - "Ġprogress ively", - "ĠE uch", - "bor o", - "ĠIng red", - "C ap", - 
"Ġun check", - "Ġìĺ¤ë ¥¸", - "Ġw re", - "ĠF T", - "ör ung", - "Ġmemor ized", - "ĠD inner", - "ĠP hew", - "ou bl", - "Ġput a", - "Ġadm its", - "ез де", - "op od", - "Ġpand a", - "Ġhing es", - "ci pe", - "Ġtrans act", - "Ġpod ia", - "Ġp ics", - "Ġcriter ion", - "ĠOrchest ra", - "ĠBl og", - "Ġsolem n", - "ĠPix ar", - "Th ree", - "Ġв низ", - "ĠVol unte", - "ĠSav age", - "ĠPV C", - "ĠC af", - "Ġwy kon", - "Ġgrad ers", - "Ġcr ouch", - "Ġcl iche", - "Ġsoy beans", - "ĠM UR", - "ĠGonz alez", - "ĠM imi", - "ĠBol sonaro", - "Ġdi aphrag", - "Ġbil ang", - "ëIJĺ ëĬĶ", - "éĤ£ æĪijåĢij", - "Ġregul ating", - "M c", - "J udge", - "Ġн ож", - "Ġjak Äħ", - "ites se", - "ĠW ij", - "Ġl ata", - "gro aning", - "POS ING", - "Ġ×IJ×ķת ×ķ", - "Ġha ga", - "Ġground ing", - "Ġviol ently", - "Ġt ills", - "Ġeng ag", - "ĠHo llow", - "Ġпоп ÑĥлÑıÑĢ", - "Ġw prowad", - "Ġrepl aces", - "Ġfluores cent", - "urg ical", - "igg ly", - "ĠTrad itional", - "t te", - "ĠÙĦ Ùĩ", - "Ġphosph orus", - "Ġapr on", - "ĠWat ers", - "ĠK ultur", - "ав ай", - "Ġol ives", - "Ġ×Ķ×IJ× ľ", - "Ġteil weise", - "Ġsen cill", - "Ġprend s", - "Ġnarr ower", - "Ġj ätte", - "ĠInformation en", - "ìĥģ ìĿ´", - "Ġstar ve", - "Ġfr ick", - "ĠBe weg", - "ठ²", - "Ġdolph in", - "ĠLAUGH TER", - "ĠINTER VIE", - "åĶ ī", - "Ġyan lÄ±ÅŁ", - "Ġtor pedo", - "Ġshort ages", - "ìĿ´ë ĵľ", - "ıld ı", - "Ġp aws", - "Ġo zone", - "Ġcultiv ated", - "ĠF ot", - "Ġnot or", - "н оз", - "Ġко ÑĪ", - "Ġtouch screen", - "ĠAll y", - "æľĢ è¿ij", - "Ġ맼ìŀĪ ìĸ´ìļĶ", - "ĠС еÑĢ", - "Ġв полне", - "Ġpap rika", - "ĠDust in", - "Ġefect o", - "Ġop ini", - "Ġmu ut", - "Ġhá»į c", - "Ġinter ject", - "ÄĻ t", - "Ġbut ts", - "ure z", - "ĠP ike", - "ĠH ok", - "ĠGu inea", - "ĠCath edral", - "Ġ14 00", - "C ra", - "+ ,", - "ë§ Ľ", - "³´ë ıĦë¡Ŀ", - "aby rin", - "Ġvide og", - "Ġо ÑĢÑĥж", - "Ġu ž", - "Ġbus cando", - "ĠAss istance", - "éĻ ½", - "Ġmel hores", - "ì¡ ´", - "Ġëģ ¼", - "ĠR J", - "Ġت Ùħ", - "Ġo min", - "Ġmotor cycles", - "ĠS app", - "Ġsupply ing", - "ĠAl gun", - "Ġaer ospace", - "×¢ ׾", - "oc cup", - "le ist", - "Ġê±° ëĬĶ", - "Ġcomplet a", - "b res", - "! (", - "ĠÐŁÑĢ ед", - "Ġdisadvant aged", - "ĠAtt end", - "ĠJud ah", - "á»ĭ ch", - "yl ene", - "act ly", - "Ġset ups", - "Ġammon ia", - "ĠSchwe iz", - "ĠSh ame", - "Ġband e", - "ĠF uel", - "Ġtroubles ome", - "Ġnum ero", - "ĠM OM", - "ĠпÑĢед лаг", - "ment ioned", - "ĠболÑĮÑĪ ое", - "ĠVikt or", - "ĠSty les", - "Ġcruc ified", - "ructure d", - "en viron", - "Ġmor als", - "Ġmed itating", - "Ġax ial", - "is ance", - "ĠAb st", - "G reen", - "Ġê± ´ì", - "Ġquad rant", - "Ġper gi", - "Ġcamer aman", - "ĠSe qu", - "Ġpa used", - "ĠLa ughing", - "ê· Ģ", - "? ..", - "ĠÅ» e", - "Ġpermit ir", - "Ġdetect ors", - "ĠH UD", - "av al", - "ĠìĹ¬ê¸° ê¹Įì§Ģ", - "Ġh ubs", - "Ġbest immt", - "ĠбÑĥдеÑĤ е", - "INTER POSING", - "Ġten gan", - "Ġcra ve", - "ĠBundes regierung", - "ĠBlo ody", - "Ġus ability", - "ĠE as", - "ĠÄijá»Ļ ng", - "Ġ195 5", - "Ġkrie gen", - "Ġhabit ual", - "Ġessential s", - "rim inal", - "Ġroomm ates", - "éĤ£ å°±", - "ĠпеÑĢе Ñħод", - "Ġng hi", - "Ġmen ing", - "ĠSym phony", - "ĠH ug", - "ag gi", - "Ġw ied", - "Ġmit ad", - "ãģ£ãģ¦ ãģĦãģĨ", - "te enth", - "ida Äĩ", - "S ave", - "Ġrob iÄĩ", - "Ġboun ces", - "° ĸìĹIJ", - "st ars", - "Ġprag matic", - "Ġcogn ition", - "Ġwra pper", - "Ġw arten", - "ad h", - "Ġpens a", - "ĠHert z", - "Ġn ÄĽ", - "ĠRe id", - "ĠPC s", - "ĠMo le", - "Ġ.. 
...", - "Ġpre cio", - "ĠChampions hips", - "ê°Ģë Ŀ½", - "Ġv ér", - "Ġcorrid ors", - "ĠElect ronic", - "S l", - "Ġа ле", - "Ġoverth row", - "Ġk abul", - "ĠR ES", - "ĠCyber punk", - "ог од", - "ĠÐĿ ав", - "Ġw an", - "Ġmanifest ations", - "Ġcual es", - "ĠW ise", - "ĠLös ung", - "Ġex fol", - "Ġearn s", - "ÑĥÑģÑĤ иÑĤÑĮ", - "Ġsa pp", - "ĠBra un", - "ĠBRAND ON", - "ì¹ Ļ", - "Ġs ano", - "ĠF EL", - "Ñĭв айÑĤеÑģÑĮ", - "ожд ениÑı", - "Ġse wn", - "F un", - "Ġrecipro cal", - "Ġexpans ive", - "ĠTra ffic", - "Ġktóre go", - "ĠÙĪ س", - "æĺ ¥", - "Ġë¹ ¨", - "pro ve", - "ig are", - "Ġlo h", - "Ø§Ø ¶", - "H ope", - "Ġdevote es", - "ĠG om", - "Ġste als", - "ĠU ms", - "ĠTw ice", - "ãĤ ²", - "iy im", - "Ġrhythm ic", - "ĠV orte", - "Ġpref ix", - "om ination", - "Ġdat o", - "Ġcust ard", - "ĠVO ICE", - "å· ŀ", - "Ġmen y", - "ist ors", - "Ġíĺ ij", - "ĠìĤ´ì ķĦ", - "Ġíĥ Ħ", - "Ġk ort", - "Ġab a", - "ĠV era", - "ep y", - "Ġì¹´ë©Ķë Ŀ¼", - "Ġsubmer ged", - "ĠC lock", - "Ġthumbna ils", - "Ġbo ast", - "ĠF are", - "!! ]", - "ĠÅĽ m", - "Ġkaik ki", - "ĠTechn ologies", - "ìĻ ¸", - "ãĥ Ĵ", - "иÑĤ ай", - "å°ı æĻĤ", - "Ġа ÑĤ", - "Ġkn obs", - "Ġre icht", - "ượ ng", - "gl io", - "Ġ맼 ìĿ´", - "ê°IJ ìĿĦ", - "Ġjot ka", - "ĠHand y", - "ĠHab en", - "n ous", - "Ġin land", - "Ġam azon", - "ho oting", - "S L", - "Ġle isten", - "~ \"", - "Ġprov oke", - "ĠTw ist", - "Ġ×ij× Ĺ", - "Ġdepart ed", - "ê° ľë¥¼", - "Ġk onse", - "ĠCar wyn", - "íķĺ ìĭł", - "ident al", - "ES CO", - "Ġt teokbokki", - "Ġdiz endo", - "ç· ´", - "ınd aki", - "imas u", - "af ar", - "Ġland fill", - "Ġcorrect ing", - "Ġcle ars", - "ĠNum mer", - "H AM", - "Ġcart ridges", - "ĠDies el", - "p aced", - "Ġobl iv", - "Ġmoy ens", - "ĠSin ne", - "ĠPre is", - "il iz", - "ĠÑģм ож", - "Ġbroad en", - "ä»ĸ æĺ¯", - "x es", - "Ġcarbohyd rate", - "íĺ ¹", - "se ok", - "Ġecho es", - "Ġc ess", - "ë° Ķ", - "Ġб изнеÑģ", - "Ġllam ado", - "Ġess ent", - "ĠìĿ¼ë °ĺ", - "ĠA ires", - "ph en", - "Ġze bra", - "Ġsymbol ism", - "On ce", - "Ġr acks", - "ĠKaf ka", - "ĠÑģеÑĢÑĮ ез", - "Ġsin n", - "p icious", - "ka a", - "Ġmotherf ucker", - "Ġapprentices hip", - "Ġr pm", - "Ġtax ation", - "Ġfur ry", - "ĠSac red", - "ĠÑĢаз м", - "por a", - "eng es", - "ĠíĹ Īë", - "ĠÑģ ин", - "Ġsanit izer", - "Ġcr inge", - "ĠS ca", - "оÑĩ но", - "Ġof ere", - "Ġmel odies", - "ĠVel vet", - "ĠIhr er", - "ĠHy brid", - "ĠG iov", - "Ġirgend was", - "Ġdep ende", - "ĠUs ers", - "Ġh ump", - "dri ving", - "Ġs f", - "Ġruth less", - "à¹ĢภĦ", - "Ġlem ons", - "Ġfö ret", - "ĠO j", - "Ġм ама", - "Ġinter personal", - "Ġge v", - "Ġab norm", - "иÑģ л", - "Ġин д", - "Ġkont roll", - "Ġreg res", - "Ġled ge", - "Ġerzäh lt", - "ĠT act", - "Ġarri vé", - "Ġsubstant ive", - "Ġspoon ful", - "zw ischen", - "oooo o", - "Ġconten ido", - "Ġbes l", - "á»ĥ m", - "k ten", - "Jam ie", - "Ġsand y", - "ä¸į åIJĮ", - "â ĭ", - "Ġp ase", - "Ġdet te", - "ĠBelg ian", - "ê° ľë", - "ula res", - "r ud", - "ig or", - "ĠíĮ ¬ë", - "Ġremed ies", - "Ġblast ing", - "ĠS ich", - "Ġож ид", - "Ġmon str", - "Ġmanif old", - "Ġglaub en", - "ĠE ST", - "Ġstream line", - "Ġlobb ying", - "ĠGoth ic", - "to ire", - ".. 
'", - "Ġdém ocr", - "Ġнаб лÑİд", - "Ġwsp ól", - "ĠczÄĻ ÅĽÄĩ", - "ä¸ĭ éĿ¢", - "is és", - "g angen", - "Ġbez pie", - "rem lin", - "ê° Ŀ", - "St ill", - "Ġres ides", - "Ġgele cek", - "Ġtélé phone", - "Ġpe wn", - "Ġle opard", - "Ġcompliment ary", - "Ġc rib", - "ĠAnim als", - "Ġge il", - "ess el", - "Ġgard er", - "Ġcatch y", - "æ¨ ¹", - "ĠE ts", - "ĠCom mercial", - "ĠD ENNIS", - "ĠCoordin ator", - "ĠAb igail", - "ffff ff", - "ấ p", - "Ġpeque ña", - "Ġinject ions", - "ce kt", - "Ġphilanthrop y", - "Ġp uck", - "Ġcelebr ates", - "ĠD unk", - "ĠD latego", - "ãģ¾ ãģł", - "δ ή", - "grad uate", - "ĠM obil", - "t ill", - "ac am", - "Ġyol ks", - "Ġtang led", - "Ġman iac", - "Ġoblig ed", - "ĠLa ink", - "Ġver der", - "ĠDam on", - "Ġmut ant", - "Ġhop ping", - "Ġre ins", - "Ġinver ter", - "Ġcont empt", - "׳ ס", - "le arning", - "M iss", - "ĠÐĵ оÑģ", - "ĠMe yer", - "ê»ĺ ìĦľ", - "é£ İ", - "×ķ׳ ×Ļ×Ŀ", - "ask ing", - "Ġtrim ming", - "Ġtre asury", - "Ġs ente", - "A ust", - "ĠUnterstüt zung", - "ĠCom edy", - "ĠAn akin", - "é ¹", - "ÑĢÑĥ ÑĤ", - "ĠH ari", - "ograph ers", - "Ġoat meal", - "ĠB ots", - "ä¸į äºĨ", - "Ġп алÑĮ", - "Ġacknowledge ment", - "x ic", - "Ġê´Ģ ìĭ¬", - "gas ping", - "Ġãģ ķ", - "Ġterr ace", - "Ġor naments", - "ĠM ER", - "comm ittee", - "ĠìĹĨ ìĬµëĭĪëĭ¤", - "Ġr ij", - "é ³", - "צ ×Ŀ", - "le me", - "Ġlibert ies", - "Ġfell as", - "ĠCop per", - "ben ch", - "ĠIde a", - "á»į n", - "ÑĪ а", - "Ġvers ión", - "ÏĦο Ïį", - "ĠÐľ и", - "ĠпÑĢил ож", - "Ġbox er", - "ĠT anner", - "ĠM oy", - "ì¹ĺ ëĬĶ", - "T hr", - "Ġtin ham", - "Ġpol ishing", - "Ġconsequ ently", - "Ġamen ities", - "ĠK I", - "ĠGRE EN", - "ĠFrank ie", - "н иÑĤ", - "itt el", - "Ñģ кое", - "urs ed", - "Ġup bringing", - "Ġth ứ", - "ĠìĭĿ ìľ¼ë¡ľ", - "Ġwh im", - "Ġchin ese", - "conf idence", - "ĠJ eder", - "ãģª ãģ®ãģ§", - "aj cie", - "ĠT ous", - "ĠPow ers", - "ừ a", - "other mal", - "ĠвÑĭ ÑĪе", - "r ale", - "Ø§Ø ®", - "Ġì§Ģ ìĽIJ", - "Ġép isode", - "Ġsul ph", - "Ġenc ara", - "k raft", - "alar ı", - "ĠCom es", - "Ġdiv ul", - "ĠRud olph", - "ĠM use", - "Ġut ens", - "ĠìŀIJ 주", - "Ġp ana", - "ĠVeget a", - "ĠPH P", - "ĠN SA", - "ent in", - "ĠCarne gie", - "ا ÙĬ", - "iÄĻ cy", - "H arry", - "Ġf ır", - "С п", - "Ġglad ly", - "Ġaver aging", - "íķĺ ê²łìĬµëĭĪëĭ¤", - "лÑı ÑİÑĤÑģÑı", - "ĠÐľ енÑı", - "Ġquot ation", - "ri res", - "itch ens", - "ay ed", - "Ġun att", - "ĠP erez", - "ĠоÑĤ меÑĤ", - "Ġtact ile", - "ĠEu h", - "is ini", - "b uh", - "Ġhat ır", - "ĠìŀĪ ìľ¼", - "Ġpolicy makers", - "³´ì Ħ¸ìļĶ", - "ac ı", - "Ġκ ι", - "Ġregister ing", - "re to", - "ĠSpr inkle", - "ĠGram my", - "ax ter", - "Ġб и", - "Ġsit ter", - "Ġpred ic", - "Ġthin ly", - "Ġstr um", - "Ġag grav", - "Ġa ha", - "ر ج", - "m ellow", - "Ġconst ante", - "ĠL aut", - "ist on", - "Ġtransition ed", - "ĠCamb odia", - "ãģĦ ãģįãģ¾ãģĻ", - "è·Ł 大家", - "art ed", - "Ġmis f", - "ĠPunk te", - "Įë ĵł", - "Ġtremb ling", - "Ġges pannt", - "ĠعÙĦÙĬ Ùĩ", - "Ġникак иÑħ", - "Ġë¶Ģë ĵľë", - "ĠÑĢазв иÑĤ", - "Ġit chy", - "Ġc iento", - "Ġpl ains", - "Ġk ittens", - "Ġback log", - "ĠPres iding", - "pt a", - "Ġha voc", - "ĠDarr in", - "ĠÐĽÑİ Ð±", - "Ġsegreg ated", - "Ġg hetto", - "Ġerle bt", - "Ġdrug iej", - "ĠSi xt", - "åı ĥ", - "ร ะ", - "uen cia", - "Ġíķĺ 기", - "ĠëĨ į", - "Ġrob i", - "Ġpione ers", - "Ġmilli ards", - "ĠWitch er", - "Ġ무ìĹ ĩ", - "or ro", - "m ass", - "Ġdiver gence", - "ĠRiver a", - "ĠNo odles", - "Ġend roit", - "ĠK osten", - "ĠдÑĢÑĥг а", - "ĠmÃŃn imo", - "ĠKazakh stan", - "ت Ùĩ", - "Ġвоз дÑĥ", - "Ġgesch rieben", - "ĠN il", - "Ñģ ки", - "ĠFr üh", - "Ġbever ages", - "æº IJ", - "ĠG on", - "æĺ ¨", - "Ar in", - "ĠInt ro", - 
"ocaly ptic", - "Ġexhaust ion", - "ĠStat us", - "ĠBatter y", - "és z", - "£ ¼ë", - "air y", - "Ġë³´ìŬë ĵľë", - "Ġdispar ity", - "Ù Į", - "ĠTuc son", - "Ġbright ly", - "pro blem", - "Ġbiom ass", - "éĻ į", - "§ ī", - "Ġhur dle", - "Ġwavelength s", - "Ġ< <", - "Ġteam ed", - "FF FF", - "ĠS lim", - "om ial", - "Ġunve iled", - "ĠVere in", - "ÙĤ Ø·", - "est ry", - "Ġcl ás", - "Ġch eddar", - "Ġaccus ing", - "ĠScient ific", - "ĠбÑĥд е", - "ĠCyr us", - "ε ÏĦε", - "Ĩĵ ê³ł", - "Ġë³ Ħ", - "Ġcur d", - "Ġrefer rals", - "sh ift", - "åį ķ", - "nik ów", - "Ġm ier", - "Ġconf ronting", - "ê²ĥ ëıĦ", - "aw l", - "Ġtry in", - "Ġê·¸ëŀĺ ìļĶ", - "Ġch iar", - "Ġìĺ¤ëĬ ĺëıĦ", - "æĶ¿ æ²»", - "es que", - "Ġmism os", - "ĠSh ak", - "Ġsoci aux", - "Ġpi ÅŁ", - "ĠkiÅŁ i", - "Ġcy an", - "h ay", - "be w", - "b od", - "ĠÎ ¹", - "ĠMain ly", - "Ñİ ÑĤÑĮ", - "hab itude", - "ĠÑģп окой", - "è·Ł æĪij", - "Ġpre con", - "ĠM andy", - "ðŁ¤ £", - "ill os", - "Ġgr upp", - "Ġcr umble", - "Ġconstru ctor", - "erv ices", - "Ġlight house", - "ĠCon cept", - "ан ÑĤи", - "alt ro", - "h ope", - "ĠAll eg", - "ìĸ´ë ¥¼", - "pie ces", - "oun ter", - "Ġíķĺ ëĭĪê¹Į", - "ĠìĿ¸ íĦ°ë", - "Ġvérit able", - "Ġthread ed", - "bl ind", - "Ĥĺë Ŀ¼", - "Ġtr ays", - "ĠEd ison", - "ĠÃĸ z", - "ĠSte vie", - "Ġl ender", - "Ġbrig ade", - "Ġdeuts che", - "m uffled", - "b art", - "Ġinsan ity", - "Ġsav vy", - "Ġsens ational", - "Ġdere chos", - "ĠM X", - "ĠпÑĢ еп", - "Ġthreat ens", - "Ġrealt Ãł", - "Ġindic ative", - "Ġch ops", - "Ġbenef iting", - "ĠVern on", - "ĠSt rand", - "n un", - "qu ently", - "10 1", - "Ġe el", - "ìĪ Ļ", - "r ints", - "ĠÙħ س", - "Ġب د", - "Ġпо ÑģÑĤÑĢо", - "Ġyap mÄ±ÅŁ", - "Ġol ması", - "Ġi edereen", - "ol é", - "ke f", - "Ġë°ľ ìĥĿ", - "Ġr ained", - "Ġalm ighty", - "ĠвÑĭ д", - "ĠC PR", - "F re", - "Ġinhab ited", - "Ġarb ets", - "Ġa kin", - "а ÑģÑĤв", - "v ania", - "Ġhäuf ig", - "ĠMat te", - "s orry", - "Jen ny", - "ĠгÑĢ ад", - "Ġwh it", - "Ġbro kers", - "å¯ Ł", - "Ġh ine", - "ast en", - "Ġг ÑĢÑĥ", - "M B", - "ĠP RI", - "S ab", - "Ġwrest ler", - "Ġfacil itating", - "Ġeh kä", - "ĠC red", - "Ġ12 7", - "Ġnot hin", - "Ġmand ated", - "å¯ Į", - "ÑĥÑĤ ÑģÑĤв", - "F rank", - "Ġwor s", - "Ġdzie ÅĦ", - "ĠUnder ground", - "Ġznaj du", - "ĠB ä", - "ĠPrin zip", - "аÑĤ елей", - "Ġveter inar", - "Ġsplend id", - "Ġroz p", - "Ġpsych opath", - "ig on", - "Ġh ops", - "Ġc ần", - "ĠX ian", - "Ġtro isième", - "Ġproduct o", - "ĠdeÄŁ er", - "ĠContin uing", - "ив ал", - "c ık", - "Ġmoistur izer", - "Wh ite", - "Ġsi is", - "ĠEver est", - "ien ced", - "Ġcả m", - "ĠJ apon", - "´ìł Ħ", - "Ġten ÃŃan", - "Ġenc anta", - "M m", - "Ġdrop down", - "ĠI ya", - "³´ë ©´", - "Ġword ing", - "ĠSque eze", - "ĠMap le", - "Ġclar ified", - "ĠMun icip", - "ĠRou ge", - "ĠNick i", - "ĠGo o", - "v olt", - "t ek", - "fect ure", - "f red", - "ar rive", - "ãĥ¼ ãģĦ", - "te z", - "E p", - "Ġob ras", - "ĠV ID", - "ĠR iv", - "ĠMod i", - "i be", - "Ġacontec endo", - "Ġim itation", - "Ġcamoufl age", - "Ġspan ning", - "ĠSEC RET", - "ĠOre o", - "ìĨĮë ¦¬", - "Ġh unch", - "Ġca ÅĤe", - "Ġspont aneously", - "ĠPer d", - "Ġet ap", - "ĠHo le", - "ĠDis ability", - "Ġafter life", - "æģ ©", - "Ġtest ified", - "Ġpres up", - "Ġpet roleum", - "Ġcontr ario", - "ĠAss essment", - "ÄŁ lu", - "Ġp ests", - "Ġdil ig", - "ĠвÑģÑĤÑĢ еÑĤ", - "Ġcons équ", - "Ġcann ons", - "Ġcan oe", - "ĠM ile", - "Ġcit oy", - "Ġbe gged", - "ĠMin nie", - "ÅĤy ch", - "Ġprinci pe", - "ÏĢÏĮ ν", - "m niej", - "Ġw ert", - "Ġëĭ¤ë ĵ¤", - "an se", - "Ġunc les", - "Ġprovoc ative", - "Ġinter sections", - "Ġdemocr ats", - "ĠJul ius", - "ин ки", - "yg usal", - "Ġ׾ ×ķ", - "Ġgj 
orde", - "Ġg asket", - "ĠB ock", - "ĠÄ° n", - "b reat", - "ĠEqu ity", - "ard ı", - "Ġкан але", - "Ġд ней", - "Ġt Ỽi", - "Ġfi xture", - "Ġab uses", - "Ġv aya", - "Ġou vert", - "Ġmultic ultural", - "Ġcontext o", - "ĠSes ame", - "Ġdé pl", - "Ġcons omm", - "ĠPart e", - "Ġp em", - "ĠCon an", - "Ġб ÑĸлÑĮ", - "Ġpersu aded", - "Ġdra ins", - "M oo", - "F ORE", - "Ġб аÑĤ", - "Ġf od", - "ĠProduct s", - "ì§Ħ ì§ľ", - "Ġ\" [", - "ĠW ick", - "ĠNar uto", - "н али", - "ry w", - "Ġl odge", - "Ġin h", - "Ġvont ade", - "Ġdi j", - "ĠJes ús", - "Look ing", - "Ġfore arm", - "ĠIntegr ation", - "ĠHARR IS", - "Ġtool bar", - "le ader", - "Ġsel dom", - "Ġб ÑĢоÑģ", - "ĠK ook", - "он д", - "Ġmon opol", - "Ġmill et", - "Ġl ira", - "ĠAs ians", - "Ġ18 90", - "ci ÄŁim", - "Ġed en", - "ĠIKE A", - "ĠNeigh bor", - "ĠKazu ya", - "ü d", - "Ġpsych edel", - "Ġenvision ed", - "åĿ Ĺ", - "Ġï· »", - "Ġw under", - "ĠBulgar ia", - "B rid", - "Ġmar row", - "Ġdep iction", - "ĠT in", - "ĠPhar ise", - "Ġeinz ige", - "Ġblind ly", - "ãģĽ ãģ¦", - "Ġdef ens", - "D ire", - "Ġvibr ating", - "Ġtroll s", - "Ġdisrespect ful", - "Ġw od", - "Ġstimul i", - "Ġcreep ing", - "Ġcla irement", - "Ġsc ariest", - "Ġdécouv rir", - "Ġ10 4", - "ĠвеÑĢ Ñħ", - "ĠÅĤ at", - "Ġróż ne", - "Ġbar ley", - "ĠRe pl", - "ĠT we", - "k ke", - "ĠãģĿ ãĤĮ", - "ĠRed mi", - "ĠMet roid", - "Ġή ÏĦαν", - "Che ck", - "ĠS EN", - "Ġ ido", - "ÑĤоÑĢ ии", - "ó p", - "UN KNOWN", - "Ġänd ern", - "ĠJu ice", - "ĠGes icht", - "å°± æľĥ", - "ĠнаÑģÑĤ олÑĮко", - "íĥ ķ", - " Ń", - "ex hales", - "Ġì´ ī", - "Ġj sem", - "ÏĢ ÏīÏĤ", - "Ġit t", - "ëªħ ìĿ´", - "Ġrem ix", - "Ġbloss oms", - "ĠR enee", - "is ations", - "ìĬ¤í Ħ°", - "Ġë³´ ìĿ´ëĬĶ", - "uest as", - "op edia", - "ĠA im", - "ìĿ´ì¦ Ī", - "sc ene", - "Ġleak age", - "uck t", - "S ad", - "A sk", - "Ġsusp ense", - "Ġimp ost", - "ĠStrateg ic", - "ĠIt ÃŃs", - "âĢ Į", - "Ġkey boards", - "Ġam using", - "og r", - "id erman", - "ŀ ĸ", - "Ġв ижÑĥ", - "Ġd ips", - "Ġapolog ized", - "ĠST AR", - "Ġesc uela", - "ĠC hing", - "н ениÑı", - "Ġë¶Ģë¶Ħ ìĿ´", - "ĠFle et", - "Ġs amb", - "Ġentsprech end", - "Ġelectrod es", - "ĠFrei heit", - "æĪij ä¸įçŁ¥éģĵ", - "ĠSh rim", - "iÃŁ e", - "Ġselect ions", - "Ġfor di", - "Ġd oss", - "Ñı Ñĩ", - "Ġdiscrimin ate", - "ĠAu ÃŁerdem", - "Ġdesenvol v", - "ĠIntern al", - "ĠBened ict", - "å¯ Ĩ", - "ĠSh iv", - "M issy", - "Ġоб наÑĢÑĥж", - "Ġна ÑģÑĤÑĢо", - "Ġcontrol ar", - "ĠL ia", - "Ġopio ids", - "ant u", - "Ġcup board", - "æģ IJ", - "г е", - "acht s", - "Ġcur ated", - "Ġx em", - "Ġwe ary", - "Ġbre thren", - "Ġbudget ing", - "Ġpour tant", - "éļ »", - "ais ia", - "ĠоÑĤв еÑĩ", - "ĠG IS", - "μ αι", - "Ġש×Ķ ×ķ×IJ", - "Ġsa ud", - "Ġl Ỽ", - "Ðķ Т", - "ub ine", - "ĠнÑĥж ен", - "Ġkidna pping", - "Ġbr at", - "ĠTer re", - "ĠMon et", - "Ġë§Ī ìĬ¤íģ", - "Ġflash y", - "ĠIS BN", - "Ġfreel ance", - "i age", - "Ġjun ge", - "ì¶ ©", - "cer al", - "ĠÑĤоÑĩ ки", - "Ġform ulate", - "ĠF ER", - "ĠDart mouth", - "ìľ¼ë ©´ìĦľ", - "å¢ ĥ", - "ow iÄħ", - "ĠëĶĶ ìŀIJ", - "Ġreg iment", - "Ġmetabol ismo", - "ĠP arr", - "Ġ충 ë¶Ħ", - "Ġsan ity", - "ĠL al", - "ĠG ö", - "ĠG la", - "Ġprot o", - "Ġmicroscop ic", - "Ġk ang", - "ĠSc alia", - "Ġp ug", - "ĠSc ore", - "ĠSav annah", - "Ġgard e", - "ĠN OR", - "å°į åIJ§", - "Ġsche int", - "Ġp óÅĤ", - "Ġcor ri", - "Ġbr ute", - "Ġ ÅĤad", - "ä»ĸ 们", - "Ġsucceed ing", - "Ġbicy cles", - "N on", - "Ġseek ers", - "Ġuncond itional", - "Ġrhy mes", - "ĠGar age", - "Ġinv oice", - "Ġcan vi", - "ne ck", - "Ġcustom izable", - "irit ual", - "Que en", - "íķĺ ìĭľëĬĶ", - "Ġpower less", - "Ġcs ak", - "ä¸į ä¼ļ", - "is oft", - "Ġìłķ íĻķ", - "Ġnh ân", - "ĠM AND", 
- "ĠH af", - "Ġrevol ves", - "ä¹Ł åı¯ä»¥", - "ov an", - "ar oo", - "ĠGr ind", - "éĽ ª", - "Ġindispens able", - "Ġconsult ed", - "ĠClin ical", - "A cc", - "Ġol hos", - "Ġmon ter", - "ĠH ana", - "et ah", - "Ġva an", - "Ġt igers", - "Ġcau cus", - "ðŁĺ Ĥ", - "³´ì ŀIJ", - "pow ers", - "ium s", - "ĠíĨ łë", - "Ġtrad icional", - "Ġreson ated", - "Ġìĭł 기", - "th em", - "Ro bert", - "Ġelement o", - "Ġant id", - "Ġоб Ñģ", - "Ġnat ives", - "Ġlo ca", - "ow ment", - "ĠT ight", - "Ġ æĢĿ", - "Ġmel an", - "ĠN ue", - "am is", - "Ġsor gen", - "as ına", - "H ome", - "ĠPUB G", - "Ġaw fully", - "ĠSh ore", - "ĠPer ché", - "ĠL au", - "ĠCind erella", - "ĠCh est", - "Ġsem antic", - "Ġdesert ed", - "ĠMom o", - "ĠHern andez", - "gen es", - "ĠAd ult", - "иÑĩеÑģ кого", - "osh ima", - "ĠcaracterÃŃst icas", - "ĠK L", - "´ìŀ ¥", - "oc ar", - "Ġfeh lt", - "Ġd ruk", - "ĠPop py", - "EN GLISH", - "ĠVerg leich", - "B rien", - "Ġrec omp", - "ĠÑģ д", - "Ġmer ger", - "Ġmarket ers", - "Ġhoney moon", - "Ġpen so", - "Ġbell i", - "еÑĤ Ñĥ", - "Ġbank er", - "Cam era", - "ĠSt all", - "ĠSt amp", - "ĠB ite", - "еж де", - "Ġs ür", - "Ġgü ç", - "ĠPas sover", - "ĠBug ün", - "ĠÑģожал ениÑİ", - "Ġн из", - "Ġman ure", - "Ġglac ier", - "è« ĩ", - "RA Y", - "ter ror", - "Ġsal ads", - "Ġhur ricanes", - "ĠDesign er", - "ator io", - "Ġfact ual", - "ĠTam my", - "Ġзв ÑĥÑĩ", - "Ġintrodu ctions", - "Ġhouse keeping", - "Ġh anger", - "ëĭ ĺë", - "ak te", - "ĠCol a", - "' ]", - "ĠG ender", - "оÑĢ он", - "ip se", - "ic ias", - "Ġsuccess ive", - "Ġpolit ic", - "Ġhö her", - "ĠQ iao", - "ĠG imme", - "Ġл ож", - "Ġse b", - "ĠWe iter", - "ĠSak ura", - "ĠB oulder", - "ĠAm érica", - "peÅĤ nie", - "Ġtecn ologÃŃa", - "ish ops", - "f ur", - "Ġmoon light", - "Ġdispers ed", - "Ġre z", - "ен ное", - "алÑĮ нÑĥÑİ", - "ĠTw elve", - "ĠH OR", - "ìĭ¤í ŀĪ", - "il age", - "Ġshad ed", - "Ġres umes", - "ĠPe anut", - "ĠM ILL", - "ap ons", - "ĠU FC", - "ĠSo le", - "Ġjoy stick", - "ĠOliv ier", - "war ming", - "Ġsyll abus", - "Ġоб Ñīе", - "Ġhi á»ĩn", - "Ġfest a", - "Ġcr adle", - "ĠZ ac", - "Ġremem brance", - "Ġê°Ļ ìķĦìĦľ", - "ĠpiÄĻ k", - "Ġco exist", - "ĠV II", - "Ġá reas", - "Ġu waż", - "Ġobser vers", - "Ġmännisk or", - "co on", - "ĠD AM", - "Ġnas zym", - "Ġall igator", - "ĠFree ze", - "ĠEst ate", - "ĠÑĤÑĢ ади", - "Ġunder cover", - "Ġn ies", - "ĠFeh ler", - "pl in", - "ĠK abul", - "il ate", - "Ġê³ł ìĸij", - "Ġm op", - "ìĦ ¼", - "Ġand erer", - "ĠK ELL", - "ок и", - "Ġж еÑģÑĤ", - "Ġgra zing", - "Ġda ÃŃ", - "Ġcapital ize", - "Ġa pex", - "Ġnurt uring", - "Ġcort ar", - "Ġcontr ac", - "ımız ı", - "Ġtand em", - "éĥ½ æľī", - "ge ment", - "ĠÑģиÑģÑĤем а", - "Ġman que", - "ia jÄħ", - "W OR", - "Ġا ب", - "Ġcart s", - "AN O", - "Ġë°Ľ ê³ł", - "ĠC ena", - "ĠBi ology", - "id ar", - "Ġa ż", - "er ne", - "an u", - "Ġthank ed", - "Ġsubmar ines", - "Ġman ic", - "Ġм оз", - "ä¼ Ĭ", - "inst ant", - "ess ential", - "Ġsam urai", - "Ġpast i", - "Ġal an", - "Ġbro ch", - "Ġb aker", - "ĠGu ill", - "¨ ¼", - "Ġwithd rawn", - "ëĭ Ŀ", - "Per fect", - "qu ency", - "Ġstream lined", - "Ġ13 00", - "´ë ıĦ", - "Ġëĸ łë", - "Ġãģ¯ ãģĦ", - "Ġh vad", - "ä¸Ģå®ļ è¦ģ", - "Ġverb ally", - "ĠK ons", - "Ġì¡° ìĭ¬", - "Ġdie z", - "æİ° æİ°", - "Ġchuck ling", - "ĠM ih", - "Ġrall ies", - "Ġman ter", - "Ġearn est", - "s uper", - "Ġge ce", - "ĠR end", - "ĠGer ade", - "jen igen", - "ĠV all", - "Ġìŀ ĪëĤĺ", - "ĠÑģказ ала", - "Ġtrabal h", - "ĠнаÑĪ ем", - "Ġм еÑħ", - "ik it", - "Ġnoun s", - "Ġneurolog ical", - "Ġmotiv ational", - "ĠMcM ahon", - "ĠFin ished", - "Ġë³´ ìĿ´", - "ĠField s", - "Ġadoles cents", - "ĠT isch", - "ĠNe ben", - "ĠFl owers", - 
"ĠEner g", - "Ġdire t", - "ĠTh i", - "ĠP icas", - "æĥ ľ", - "æĢİä¹Ī æł·", - "Ġav ete", - "ĠF ors", - "ĠChap el", - "N ão", - "E t", - "ĠÑģод еÑĢж", - "ren o", - "Ġs ven", - "Ġdost ÄĻp", - "ne e", - "ĠSnap dragon", - "ĠID s", - "ìķĺ ëĬĶëį°", - "ר ×ļ", - "Ġsun flower", - "Ġperpet ual", - "ç³ ĸ", - "Ġkn ights", - "Ġg ird", - "ĠTo ld", - "Ġvolcano es", - "Ġadvers ary", - "ĠEconom y", - "Ġextra pol", - "Ġbl uetooth", - "Ġzoom ing", - "Ġsk ys", - "Ġgen ial", - "ÃŃcul os", - "amb re", - "Ġм еÑĢ", - "Ġteen y", - "Ġstress ing", - "ìķ Į", - "ON Y", - "Ġtransluc ent", - "Ġround ing", - "Ġgr ues", - "×Ļ׳ ×Ķ", - "ap rès", - "Ġprue ba", - "Ġpoly gon", - "Ġblue berry", - "ĠProgram m", - "Ġtren ches", - "Ġse bagai", - "Ġpal ate", - "Ġla ude", - "Ġbehav ed", - "Ġlongitud inal", - "ĠMod ule", - "Ġadm ir", - "λ ι", - "G reg", - "Ġwy st", - "Ġpropag ate", - "Ġmold s", - "ĠT ub", - "ĠL oud", - "ust o", - "Ġun stoppable", - "Ġreinfor cing", - "éĿŀ常 çļĦ", - "ĠпÑĢоблем а", - "Ġpot encial", - "Ġhe mp", - "ìŀ Ķ", - "ठ¯", - "Ġopt ic", - "Ġerfolg reich", - "Ñģ Ñĭ", - "олÑĮ ÑĪе", - "ur st", - "ĠPo is", - "Ġrespond ents", - "Ġneh me", - "ĠEx ternal", - "ol ate", - "H yun", - "Ġquart z", - "Ġmathematic ian", - "Ġbás icamente", - "Ġa il", - "ìł ľë¥¼", - "att utto", - "Ġno oit", - "Ġaff lict", - "ĠOl ga", - "èŃ ·", - "Ġна ÑĤ", - "Ġd ites", - "Ġreal idade", - "Ġk än", - "Ġuniqu eness", - "Ġpad res", - "Ġsubs idi", - "Ġpige ons", - "β α", - "st ad", - "Ġder en", - "ĠС лед", - "d oo", - "ĠопиÑģ ании", - "Ġam ber", - "Ġgoose bumps", - "ĠfrÃ¥ gor", - "ĠV ital", - "ĠIsrael ites", - "w asser", - "Is n", - "Ġcomm its", - "ĠSTE VEN", - "ĠBev ölker", - "uit ive", - "Ġleg en", - "Ġbr uk", - "иÑĢов ан", - "yn en", - "hel m", - "Ġgener ational", - "ĠL ändern", - "οι ÏĢÏĮν", - "uz u", - "Ġcall er", - "он ÑĮ", - "üm ü", - "Ġbes ar", - "Ġpl ats", - "Ġmig rated", - "Ġj ap", - "ĠW AR", - "Ġdis sect", - "ĠZus ch", - "ĠZe iten", - "ĠL ions", - "ĠD F", - "â Ķ", - "ки в", - "Ġpedest rians", - "ĠMar ilyn", - "d ock", - "Ġy ht", - "Ġre incarn", - "ĠSon o", - "ĠGrow th", - "ÑĥÑģ ов", - "Ġdun geons", - "Ġbag us", - "k ich", - "ĠÑĥ кÑĢаÑĹ", - "éĨ «", - "ĠK eller", - "chem istry", - "J apanese", - "Ġwill st", - "Ġdecomp osition", - "ĠÑģÑĤ ен", - "Ġrev ived", - "íķĻ êµIJ", - "ĠÅ ĵ", - "ä½ IJ", - "ìĭ ¸", - "ipp y", - "Ġhour ly", - "j än", - "ĠWork shop", - "Ŀ¼ ìĦľ", - "Ġcu arto", - "Ġpat rim", - "ĠB urch", - "ĠìŀĪ 기", - "Ġhe pat", - "Ġh Ãłng", - "ĠëĮĢ íķ´", - "ĠваÑĪ и", - "Ġre work", - "Ġpar se", - "Ġçıkt ı", - "ĠS ax", - "ĠMong o", - "ĠAa ah", - "ram ble", - "D J", - "Ġstabil ized", - "ĠSpe ech", - "Book s", - "Ġhur dles", - "ĠW O", - "ĠLamb org", - "Ġ19 33", - "Ġvor bere", - "Ġclin ically", - "Ġbreat htaking", - "ĠGate way", - "пеÑĢв ÑĭÑħ", - "ut ers", - "Ġë¹ µ", - "Ġyet er", - "Ġpull ey", - "Ġmuff in", - "ĠPre fer", - "ĠP ence", - "Ġinform ação", - "ìĬ¤í Ĭ¸ë", - "ãĤ¸ ãĥ£", - "ĠTur tle", - "ĠReg ina", - "ĠLo ad", - "do es", - "pan ze", - "¸ Ķ", - "Ġmin a", - "ĠLatin os", - "amm ers", - "ĠT ort", - "ĠBey once", - "имо ÑģÑĤи", - "ĠвопÑĢоÑģ Ñĭ", - "Ġbul un", - "èĢĮ å·²", - "ine k", - "bere ich", - "Ġpast ure", - "ĠO A", - "ĠM elt", - "ĠEt t", - "ĠD Y", - "Ġob wohl", - "Ġle agues", - "ÑĤ еÑģÑĮ", - "Ġк ÑĥÑģ", - "Ġv ors", - "Ġto pp", - "ograph ical", - "as st", - "Ġl indo", - "Ġë°Ŀ íĺĶ", - "Ġré fl", - "Ġclim bs", - "Ġv arsa", - "Ġmethy l", - "ĠKar ere", - "Æ°á» Ł", - "R ad", - "Ġprepared ness", - "он Ñĩ", - "ĠO D", - "ĠC GI", - "Ġठ®", - "Ġspeech less", - "Ġlas ci", - "Ġbol ag", - "ĠÑħоÑĩ еÑĤÑģÑı", - "Ġgr ieving", - "ĠJohann es", - "ĠCar roll", - "ad aki", 
- "Ī ¬ë", - "ĠsÅĤ u", - "Ġinner halb", - "Ġgymn astics", - "п ÑĢи", - "if iques", - "Ġkar ate", - "Ġdom u", - "ãģĿãĤĮ ãģ§", - "OTH ER", - "Ġdemand é", - "Ġbook let", - "ĠKy oto", - "Ġw oh", - "ĠMar ÃŃa", - "viol ent", - "J E", - "Ġl óg", - "Ġbrut ally", - "c ot", - "ĠÙħ ÛĮ", - "ĠWars z", - "å® Ī", - "w ol", - "Ġmik ä", - "ĠPron ounce", - "ĠBrend an", - "Ġr oup", - "Ġital iano", - "å¦Ĥ æѤ", - "Ġкомп ÑĮÑİÑĤ", - "Ġur ging", - "ed es", - "Ġcarbon o", - "ĠRichards on", - "ĠÐĿ аÑĩ", - "ĠTra iner", - "ĠCrime a", - "Ġdi apers", - "Ġco vet", - "ĠMah ar", - "ĠH utch", - "ĠAus w", - "ber ty", - "Ġind ifferent", - "кÑĢ еÑĤ", - "uld ade", - "Ġhar ms", - "¢ ÙĨ", - "les ia", - "Ġg io", - "ĠMist ress", - "ĠK nox", - "ĠFRE E", - "Ġë £¨ë", - "ĠнаÑĪ а", - "Ġinvinci ble", - "Ġma iden", - "ĠJ eez", - "Ġbre ve", - "po le", - "Ġcritic isms", - "ĠRus ia", - "ठ®", - "ph in", - "ĠComp are", - "ĠB ON", - "Ġsne aking", - "ĠR ails", - "ĠG eral", - "Ġ195 3", - "H ola", - "Ġоп ÑĭÑĤ", - "Ġrain forest", - "Ġbel um", - "ĠOb i", - "ĠIS S", - "ãĤĮ ãģªãģĦ", - "ĠС в", - "Ġbl ond", - "Ġwz gl", - "Ġpowiedz iaÅĤ", - "Ġch oking", - "ĠSong s", - "ĠBir az", - "Ġyell s", - "Ġstyl ist", - "ÏĮ ÏĦε", - "Ġsch reiben", - "ĠJ aw", - "ĠEle ven", - "ĠR if", - "/ .", - "Ġìĺ¤ë ŀľë§Į", - "Ġtreat ies", - "uff ed", - "ĠâĪ Ĵ", - "Ġroof s", - "à¹Ģภª", - "Ġë »", - "Ġspark le", - "ĠK iev", - "ĠAr gu", - "ere cht", - "ĠÐĿад о", - "ĠF IL", - "Ġmol ta", - "ĠDe vi", - "Ġcam pe", - "Ġbene vol", - "ĠT ough", - "Ġmo im", - "Ġevac uate", - "Ġer rado", - "å© Ĩ", - "ÑĢÑĥ го", - "Ġíİ ĺ", - "ĠÎĵ ια", - "Ġweak en", - "Ġillum inated", - "Ġsig lo", - "ĠV acc", - "и ей", - "al is", - "ĠÑĥ ÑģÑĤÑĢой", - "Ġdon a", - "ÅĤ os", - "ü man", - "Ġprodu cción", - "Ġcl ot", - "ĠM ango", - "Ġune asy", - "Ġsh uts", - "ĠExam ples", - "ve ll", - "e be", - "Ġprompt ly", - "ĠT eles", - "ĠпÑĢоÑĪ л", - "Ġpu erta", - "Ġüber zeug", - "Ġco ch", - "so cial", - "ĠB enson", - "ĠM eth", - "ĠEx ped", - "Ġsupplement al", - "Ġconce ive", - "Ġ×ĺ ×ķ×ij", - "Ġcapt ivity", - "ıĻ ìķĪ", - "ĠÑħ Ñĥд", - "form ing", - "Ġupload s", - "Ġturbul ence", - "j oint", - "Ġsatisf actory", - "ĠAn ime", - "Ġwash es", - "Ġliber als", - "ĠSun shine", - "ĠRE AL", - "ub lik", - "b inary", - "T ony", - "Ġpolar ized", - "Ġenrich ed", - "t aking", - "ĠëģĿ ëĤĺ", - "Ġple asures", - "Ġex termin", - "in ese", - "at l", - "v är", - "аÑĢ Ñĭ", - "Ġmy ÅĽ", - "n arrator", - "Ġод ном", - "Ġnaj wiÄĻ", - "Ġmobil ize", - "Ġmill or", - "Ġat a", - "æ· ·", - "ĠpolÃŃt ico", - "Ġple ad", - "Ġpain ters", - "ĠS ow", - "о ÑĦ", - "ĠìĺĽ ëĤł", - "ĠÑĩ ÑĤоб", - "Ġs abor", - "ĠUnd ert", - "ĠJER RY", - "Å¡ ÃŃ", - "Ġë° ĸìĹIJ", - "Ġpréc éd", - "Ġannot ation", - "ĠI naudible", - "Ġtext ured", - "Ġfisher man", - "v ordan", - "icher ung", - "Ġìłģ ìĿ´", - "Ġge zeigt", - "Ġmand ates", - "Ġbe ak", - "ĠTW O", - "ĠAk bar", - "il ian", - "Ġtiế p", - "Ġsuperior ity", - "ink u", - "Ġl ys", - "ĠF CC", - "ĠC PA", - "ust ering", - "nic os", - "an ja", - "Ġch ills", - "ĠC age", - "Ġse aling", - "Ġsa ç", - "Ġded ans", - "ĠAl ger", - "Ġspe zie", - "Ġcol oss", - "ıy ı", - "clock wise", - "Ġexact amente", - "Ġ iemand", - "am ı", - "Ġmand ar", - "ra j", - "f aced", - "ag ua", - "Ġê¹ Ķë", - "Ġins besondere", - "Ġdri zzle", - "Ġdimin ish", - "ĠY oda", - "A I", - "Ġbil miyorum", - "ĠM MA", - "ateg ory", - "ĠпеÑĢ еп", - "Ġparticip ar", - "Ġnormal ized", - "Ġcomplex ities", - "æ´ ²", - "æİ §", - "аÑĢ ов", - "m ist", - "ich a", - "Gr oup", - "Ġresil iency", - "Ġnog le", - "ĠCN C", - "pr ü", - "Ġphysic ists", - "н ок", - "L I", - "Ġstuff s", - "Ġsist emas", - "Ġinterfer ing", - 
"ĠMar vin", - "ér cito", - "ĠìĹĨ ê³ł", - "Ġson ic", - "Ġequ iv", - "Ġab ord", - "ĠRam en", - "Ġ0 9", - "med im", - "at iques", - "Ġдел аÑİÑĤ", - "Ġunanim ously", - "Ġsk irts", - "ĠíĬ¹ ë³Ħ", - "ĠP rix", - "k ami", - "Ġfr uition", - "Ġbirthday s", - "ик ом", - "Ġinaug ural", - "Ġcorrel ate", - "ĠT ory", - "ĠëĤĺ ìģ", - "Ġde w", - "ĠPre cis", - "ih i", - "Ġë¬¸ìłľ ê°Ģ", - "Ġc iting", - "ĠL ana", - "ĠK ag", - "Ġplay through", - "ĠProt ocol", - "fr ist", - "hov ah", - "Ġmerc iful", - "Ġb ilingual", - "ĠG uitar", - "r h", - "Ġglam orous", - "ĠVik ings", - "ĠOoo oh", - "íķĺ ëĬĶëį°", - "ĠUg anda", - "Ġcollaps es", - "ent ry", - "Ġantioxid ants", - "ëĤ ĺë", - "ÑĪ аÑı", - "Ġtri via", - "Ġgä ller", - "Ġfun gi", - "Ġmil ks", - "Ġd icht", - "μ η", - "po ke", - "ĠвÑĭп ÑĥÑģк", - "Ġfeed er", - "ĠAl cohol", - "h ower", - "Ġdes erving", - "ĠRe bel", - "ios is", - "Ġ10 3", - "Ġhand out", - "Ġen m", - "Ġland lords", - "Ġge ology", - "r ils", - "Ġco bra", - "ĠV old", - "ĠP anch", - "ĠGRE G", - "Ġpr oss", - "Ġbrac elets", - "ĠV ega", - "Ġroz um", - "æ¬ ¾", - "аз д", - "ĠLy nd", - "ĠHon ors", - "Ġsurrend ered", - "Ġlibr arians", - "12 5", - "ĠÑģ иг", - "Ġuniform ly", - "ĠE agles", - "ìķ Ļ", - "иÑĤ ан", - "and id", - "ĠìłĪë ĮĢ", - "ĠØ ¶", - "Ġarrest s", - "ĠCS V", - "ĠAzerbai jan", - "ort ic", - "ĠD X", - "ĠAdvent ures", - "Ġab us", - "ĠF au", - "Ġschlim m", - "Ġratt ling", - "Ġconsum es", - "ĠTol kien", - "Ġresurrect ed", - "ĠX Y", - "íĬ¸ ê°Ģ", - "ĠвÑĭ ÑģÑĤÑĥп", - "ĠAng ie", - "żen ia", - "M ic", - "ĠShe ila", - "acht et", - "Ġover st", - "Ġl â", - "Ġine ffective", - "æĿ ¡", - "æĢİä¹Ī äºĨ", - "å¿ Ļ", - "Ġwicht iger", - "Ġv ino", - "Ġp um", - "Ġang led", - "ĠP ione", - "ĠM ỹ", - "ãģĿãĤĮ ãģ¯", - "wo ÅĽÄĩ", - "d raw", - "ั à¹Ī", - "mark ets", - "Ġcaf es", - "ĠC em", - "â Ŀ¤", - "ĠS uit", - "M K", - "Ġemphas izes", - "Ġtort illa", - "Ġmejor ar", - "ĠSur viv", - "cast ing", - "Ġeduc ación", - "ĠG um", - "u ely", - "ĠìĹ¬ê¸° ëĬĶ", - "Ġstretch y", - "en ça", - "Ġwith hold", - "Ġex iting", - "Ġenthal py", - "ĠTrans it", - "ıl mÄ±ÅŁ", - "al ies", - "Ġsal var", - "Ġlean ed", - "ĠgroÃŁ es", - "Ġf itt", - "ак и", - "S arah", - "Ġhost el", - "Ġfinger na", - "Ġnadzie jÄĻ", - "w ives", - "R ec", - "Ġsp ool", - "аÑĤ ов", - "ĠEn emy", - "Ġf ury", - "Ġdet ta", - "ĠF ay", - "éļ ¨", - "Ñı ÑİÑĤ", - "Ġaproxim adamente", - "Ġsil os", - "Ġmag ist", - "Ġc ree", - "ĠKr ank", - "ĠD OWN", - "Ġstart led", - "Ġre born", - "ĠUm welt", - "ĠSuz anne", - "ни ÑĨÑĭ", - "out ez", - "ĠJ AC", - "y ards", - "rad as", - "ra u", - "ip ts", - "h ail", - "Ġparagraph s", - "Ġme glio", - "Ġisol ating", - "Ġace ite", - "ĠH arsh", - "Ġcy st", - "ĠBlock chain", - "ĠÑħоÑĢоÑĪ ий", - "Ġvirt uous", - "Ġinvestig ación", - "Ġdev oir", - "Ġmast urb", - "ĠS ale", - "ÙĬر Ø©", - "ĠÎ §", - "ĠStra ÃŁen", - "Ġdi kk", - "Ġa fore", - "ĠJung kook", - "Ġcho ciaż", - "ĠDebat te", - "Ġweird ly", - "Ġvia je", - "reg ist", - "H elp", - "Ġkind eren", - "Ġform ulated", - "Ġenf im", - "ĠTow ards", - "ко ÑĹ", - "iver ing", - "ĠдеÑĤ и", - "char ger", - "Ġpur l", - "Ġacadem ically", - "ĠNur se", - "Ġdel eting", - "ay o", - "Ġref usal", - "Ġdepict s", - "ĠDr acula", - "Ġtoast ed", - "ĠZomb ie", - "ĠSuper ior", - "ĠB old", - "Ġquizz es", - "Ġg le", - "4 50", - "Ġcome ço", - "yn n", - "Ġver st", - "ĠO laf", - "Ġpom oc", - "ĠS ask", - "ë ĺ", - "ĠT CP", - "ĠProper ty", - "íķĺ ì£ł", - "à¸ľ ม", - "bo om", - "ar os", - "ĠÑĢоÑģÑģ ий", - "ĠбÑĭв аеÑĤ", - "åĩº åİ»", - "ĠìĿ´ìķ¼ 기를", - "Ġcomb ien", - "v acc", - "Ġeben falls", - "par a", - "Ġз м", - "Ġdesper ation", - "ord re", - "Ġש׾ ×Ļ", - "Ġgener 
ously", - "ĠÐŀ к", - "Ġorb iting", - "> ", - "?", - "@", - "A", - "B", - "C", - "D", - "E", - "F", - "G", - "H", - "I", - "J", - "K", - "L", - "M", - "N", - "O", - "P", - "Q", - "R", - "S", - "T", - "U", - "V", - "W", - "X", - "Y", - "Z", - "[", - "\\", - "]", - "^", - "_", - "`", - "a", - "b", - "c", - "d", - "e", - "f", - "g", - "h", - "i", - "j", - "k", - "l", - "m", - "n", - "o", - "p", - "q", - "r", - "s", - "t", - "u", - "v", - "w", - "x", - "y", - "z", - "{", - "|", - "}", - "~", - "\u00a1", - "\u00a2", - "\u00a3", - "\u00a4", - "\u00a5", - "\u00a6", - "\u00a7", - "\u00a8", - "\u00a9", - "\u00aa", - "\u00ab", - "\u00ac", - "\u00ae", - "\u00af", - "\u00b0", - "\u00b1", - "\u00b2", - "\u00b3", - "\u00b4", - "\u00b5", - "\u00b6", - "\u00b7", - "\u00b8", - "\u00b9", - "\u00ba", - "\u00bb", - "\u00bc", - "\u00bd", - "\u00be", - "\u00bf", - "\u00c0", - "\u00c1", - "\u00c2", - "\u00c3", - "\u00c4", - "\u00c5", - "\u00c6", - "\u00c7", - "\u00c8", - "\u00c9", - "\u00ca", - "\u00cb", - "\u00cc", - "\u00cd", - "\u00ce", - "\u00cf", - "\u00d0", - "\u00d1", - "\u00d2", - "\u00d3", - "\u00d4", - "\u00d5", - "\u00d6", - "\u00d7", - "\u00d8", - "\u00d9", - "\u00da", - "\u00db", - "\u00dc", - "\u00dd", - "\u00de", - "\u00df", - "\u00e0", - "\u00e1", - "\u00e2", - "\u00e3", - "\u00e4", - "\u00e5", - "\u00e6", - "\u00e7", - "\u00e8", - "\u00e9", - "\u00ea", - "\u00eb", - "\u00ec", - "\u00ed", - "\u00ee", - "\u00ef", - "\u00f0", - "\u00f1", - "\u00f2", - "\u00f3", - "\u00f4", - "\u00f5", - "\u00f6", - "\u00f7", - "\u00f8", - "\u00f9", - "\u00fa", - "\u00fb", - "\u00fc", - "\u00fd", - "\u00fe", - "\u00ff", - "\u0100", - "\u0101", - "\u0102", - "\u0103", - "\u0104", - "\u0105", - "\u0106", - "\u0107", - "\u0108", - "\u0109", - "\u010a", - "\u010b", - "\u010c", - "\u010d", - "\u010e", - "\u010f", - "\u0110", - "\u0111", - "\u0112", - "\u0113", - "\u0114", - "\u0115", - "\u0116", - "\u0117", - "\u0118", - "\u0119", - "\u011a", - "\u011b", - "\u011c", - "\u011d", - "\u011e", - "\u011f", - "\u0120", - "\u0121", - "\u0122", - "\u0123", - "\u0124", - "\u0125", - "\u0126", - "\u0127", - "\u0128", - "\u0129", - "\u012a", - "\u012b", - "\u012c", - "\u012d", - "\u012e", - "\u012f", - "\u0130", - "\u0131", - "\u0132", - "\u0133", - "\u0134", - "\u0135", - "\u0136", - "\u0137", - "\u0138", - "\u0139", - "\u013a", - "\u013b", - "\u013c", - "\u013d", - "\u013e", - "\u013f", - "\u0140", - "\u0141", - "\u0142", - "\u0143", - "\u0120t", - "\u0120a", - "\u0120th", - "in", - "er", - "\u0120w", - "\u0120s", - "ou", - "\u0120the", - "re", - "on", - "at", - "en", - "\u0120c", - "it", - "is", - "\u0120b", - "nd", - "\u0120d", - "\u0120m", - "\u0120h", - "\u0120o", - "ing", - "es", - "\u0120p", - "\u0120to", - "an", - "\u0120f", - "or", - "ll", - "\u0120I", - "\u0120l", - "\u0120y", - "ar", - "\u0120g", - "\u0120you", - "ed", - "\u0120and", - "\u0120in", - "\u0120of", - "as", - "\u0120n", - "om", - "ic", - "\u0120that", - "us", - "et", - "ve", - "al", - "ow", - "le", - "\u0120is", - "\u0120e", - "\u0120it", - "ot", - "'s", - "\u0120be", - "ion", - "\u0120T", - "\u0120wh", - "\u0120A", - "ent", - "\u0120S", - "\u0120re", - "ay", - "\u0120we", - "\u0120on", - "ere", - "\u0120ha", - "ut", - "ac", - "id", - "ig", - "os", - "ke", - "ver", - "im", - "\u0120\u00d0", - "\u0120Th", - "am", - "all", - "\u0120for", - "el", - "ch", - "ro", - "\u0120this", - "\u0120st", - "\u0120W", - "\u0120u", - "ad", - "out", - "ir", - "ld", - "ct", - "\u0120k", - "if", - "\u0120go", - "..", - "\u00d0\u00be", - "ith", - "ly", - "ht", - "qu", - "\u0120-", 
- "\u0120do", - "\u0120j", - "\u0120have", - "\u0120B", - "\u0120an", - "\u0120with", - "\u0120are", - "\u0120r", - "\u0120de", - "\u0120se", - "\u0120so", - "\u0120v", - "st", - "ill", - "ur", - "\u0120li", - "\u0120M", - "est", - "od", - "ally", - "'t", - "ust", - "\u0120as", - "\u0120C", - "ce", - "\u0120me", - "\u00d0\u00b0", - "\u00d0\u00b5", - "il", - "\u0120H", - "\u0120was", - "ter", - "th", - "\u0120can", - "ant", - "\u0120com", - "our", - "ight", - "\u0120Y", - "ation", - "\u0120And", - "ol", - "\u0120sh", - "\u00d1\u0124", - "op", - "se", - "\u0120not", - "\u0120So", - "\u0120ne", - "un", - "\u0120ab", - "\u0120like", - "\u0120at", - "\u0120D", - "ie", - "\u0120he", - "\u0120con", - "\u0120ch", - "ore", - "\u0120al", - "\u0120or", - "\u0120qu", - "\u0120O", - "ome", - "ra", - "ul", - "\u0120N", - "pp", - "\u0120your", - "ould", - "\u0120P", - "\u0120fr", - "ge", - "ers", - "'re", - "\u00d0\u00b8", - "\u0120they", - "\u0120what", - "use", - "\u0120all", - "\u0120The", - "\u0120L", - "ess", - "em", - "\u0120kn", - "\u0120just", - "art", - "\u0120pro", - "very", - "um", - "\u0120lo", - "\u0120\u00ec", - "\u0120my", - "ok", - "\u0120ex", - "ab", - "\u0120there", - "\u0120but", - "\u0120know", - "\u0120su", - "\u0120G", - "\u00d1\u0123", - "\u0120E", - "\u0120ma", - "\u00d0\u00be\u00d0", - "\u0120en", - "\u0120about", - "\u0120It", - "ist", - "\u0120wor", - "ri", - "ind", - "\u0120one", - "ate", - "and", - "ink", - "\u0120le", - "ort", - "'m", - "\u0120F", - "ich", - "\u00d1\u0122", - "ide", - "\u0120get", - "\u0120out", - "...", - "\u0120will", - "\u00e3\u0123", - "ive", - "\u00d0\u00bd", - "\u0120from", - "ain", - "\u0120We", - "\u0120up", - "pe", - "res", - "ca", - "\u0120R", - "\u0120if", - "\u0120pl", - "\u0120don", - "ack", - "\u01201", - "\u0120\"", - "\u0120tr", - "\u0120us", - "\u0120Wh", - "ity", - "\u0120J", - "\u0120You", - "\u0120here", - "her", - "\u0120some", - "oug", - "ak", - "ard", - "\u0120going", - "\u0120un", - "ment", - "\u0120think", - "\u0120pe", - "end", - "\u0120(", - "cause", - "\u0120tim", - "ast", - "\u00c3\u00a9", - "\u0120our", - "\u0120want", - "ame", - "ies", - "\u0120\u00eb", - "ud", - "ine", - "\u0120really", - "\u0120te", - "\u0120see", - "ci", - "\u0120by", - "so", - "ure", - "ose", - "\u0120[", - "are", - "\u0120more", - "ah", - "one", - "ck", - "ople", - "\u00d0\u00b0\u00d0", - "\u0120then", - "\u0120thing", - "\u0120them", - "ven", - "ound", - "ost", - "ong", - "ect", - "\u0120right", - "ag", - "\u0120int", - "\u0120people", - "\u0120when", - "ous", - "pl", - "\u0120time", - "\u0120im", - "\u0120who", - "\u01202", - "ap", - "\u0120because", - "hing", - "\u0120no", - "ice", - "\u0120look", - "\u0120has", - "\u0120would", - "\u0120how", - "act", - "\u0120fe", - "nt", - "ough", - "\u0120pr", - "\u0120But", - "\u0120say", - "\u00d1\u0125", - "\u0120now", - "\u0120man", - "\u0120very", - "\u0120work", - "iz", - "\u0120K", - "iv", - "itt", - "\u0120ar", - "ep", - "\u0120cl", - "\u0120which", - "\u0120co", - "ans", - "'ve", - "\u0120sa", - "ff", - "'ll", - "\u0120any", - "\u0120act", - "\u0120ye", - "ber", - "ach", - "age", - "per", - "\u0120also", - "fer", - "\u0120these", - "\u0120ad", - "\u00d0\u00b5\u00d0", - "ther", - "ace", - "ick", - "ake", - "reat", - "ire", - "ue", - "\u0120ag", - "\u0120U", - "uch", - "ions", - "ry", - "00", - "na", - "\u0120did", - "\u0120que", - "\u0120had", - "\u0120every", - "\u0120He", - "\u0120la", - "\u0120way", - "\u0120sp", - "ble", - "\u0120This", - "ass", - "\u0120their", - "ite", - "\u0120need", - "\u0120part", - 
"\u0120were", - "\u0120back", - "ip", - "own", - "omet", - "be", - "ase", - "\u0120make", - "irst", - "ia", - "ence", - "ang", - "ank", - "\u0120got", - "\u0120pre", - "\u0120cont", - "\u0120other", - "pt", - "\u0120That", - "og", - "\u0120good", - "\u0120into", - "alk", - "\u0120been", - "\u0120am", - "\u0120over", - "ually", - "\u0120\u00e2", - "\u00ec\u013f", - "\u0120und", - "he", - "way", - "\u0120gr", - "\u00d1\u012e", - "\u0120dif", - "\u0120per", - "\u00d1\u0131", - "\u0120In", - "\u0120tw", - "ond", - "ars", - "int", - "orm", - "\u0120lot", - "\u0120where", - "\u0120\u00c3", - "\u0120V", - "\u0120somet", - "\u00d0\u00bb", - "ens", - "\u0120gu", - "\u0120ac", - "ug", - "\u00d1\u012d", - "\u00c4\u00b1", - "\u0120first", - "ree", - "\u0120his", - "ittle", - "\u0120imp", - "\u0120mo", - "av", - "\u0120little", - "\u0120What", - "\u0120much", - "\u0120z", - "\u0120\u00ea", - "able", - "\u0120\u00d0\u00bf", - "\u0120po", - "\u0120comp", - "ne", - "\u0120dis", - "\u0120let", - "ance", - "\u0120her", - "\u0120things", - "\u0120start", - "ult", - "\u0120app", - "\u0120res", - "\u0120fo", - "\u0120could", - "\u0120inter", - "\u0120those", - "\u0120des", - "\u0120well", - "\u0120two", - "\u0120kind", - "xt", - "ress", - "ely", - "\u00c3\u00a4", - "\u0120br", - "\u0120thr", - "\u0120\u00d0\u00b2", - "\u0120i", - "ish", - "\u0120differ", - "\u0120ro", - "\u0120St", - "\u0120something", - "\u0120take", - "\u0120bo", - "ys", - "\u0120she", - "\u0120talk", - "lo", - "\u00d1\u0129", - "\u0120even", - "\u00d0\u00ba", - "\u00e3\u0122", - "\u0120\u00d0\u00bd", - "\u0120bu", - "\u0120If", - "\u0120down", - "\u0120Ch", - "ade", - "ations", - "\u0120use", - "ord", - "\u0120off", - "\u0120actually", - "\u0120spe", - "du", - "ated", - "ater", - "oss", - "ning", - "\u00c3\u00bc", - "\u0120does", - "\u0120\u00d1\u0123", - "\u0120new", - "\u0120bet", - "vel", - "cess", - "ple", - "\u0120happ", - "ting", - "onna", - "\u0120es", - "\u0120day", - "\u0120only", - "ign", - "kay", - "sel", - "ents", - "ount", - "ild", - "ile", - "\u0120sc", - "\u0120him", - "\u0120again", - "ving", - "\u0120gonna", - "\u0120comm", - "\u0120hel", - "other", - "\u0120ke", - "ical", - "\u01203", - "\u0120el", - "\u0120through", - "\u0120come", - "ark", - "day", - "ier", - "\u00c3\u00b3", - "\u0120than", - "\u0120They", - "\u0120may", - "\u0120ser", - "\u00ed\u0137", - "\u0120call", - "\u0120different", - "\u0120should", - "\u0120There", - "ary", - "\u0120Now", - "\u00e3\u0124", - "thing", - "we", - "ory", - "fter", - "\u0120put", - "ors", - "ial", - "\u00eb\u012d", - "\u0120under", - "\u0120inc", - "\u0120Ye", - "ub", - "form", - "\u0120vide", - "\u00e0\u00b8", - "vers", - "\u0120feel", - "\u00c3\u00a1", - "ody", - "ft", - "fore", - "\u0120em", - "get", - "\u0120said", - "ition", - "\u0120rec", - "ious", - "atch", - "\u0120try", - "\u0120help", - "\u0120show", - "\u00d0\u00b4", - "\u0120bit", - "ull", - "\u00d0\u00b2", - "\u00d1\u0124\u00d0\u00be", - "gr", - "\u0120play", - "ife", - "ail", - "\u0120Yeah", - "\u0120quest", - "\u0120many", - "\u0120pers", - "\u0120great", - "\u00c3\u0143", - "\u0120est", - "ng", - "\u0120\u00e2\u013b", - "ty", - "la", - "\u0120Oh", - "\u0120\u00d7", - "\u00e0\u00ae", - "\u0120Be", - "ady", - "\u0120most", - "ction", - "\u0120No", - "\u0120doing", - "\u0120being", - "\u0120too", - "ces", - "\u0120bl", - ".\"", - "\u0120rem", - "iss", - "ons", - ">>", - "ru", - "wn", - "ont", - "ib", - "ell", - "\u0120sm", - "oth", - "ual", - "\u0120>>", - "\u0120ph", - "les", - "oc", - "ful", - "\u0120sec", - "ise", - 
"\u0120add", - "igh", - "ert", - "\u0120same", - "\u00e2\u0122", - "\u0120mean", - "\u0120find", - "ek", - "\u0120end", - "--", - "\u00d0\u00bc", - "\u0120still", - "az", - "\u0120'", - "\u0120min", - "\u0120years", - "urn", - "\u0120around", - "self", - "\u0120wr", - "bs", - "ought", - "\u0120\u00e2\u013b\u00aa", - "\u0120fl", - "ange", - "\u0120after", - "\u0120point", - "mer", - "ved", - "\u0120long", - "oy", - "\u00e4\u00b8", - "\u0120cr", - "ways", - "\u0120sy", - "\u0120tra", - "\u012020", - "ave", - "\u0120che", - "\u0120ent", - "\u0120before", - "ph", - "\u0120att", - "ian", - "ily", - "\u0120person", - "\u0120big", - "\u0120sch", - "\u0120real", - "\u0120next", - "\u0120love", - "\u0120video", - "\u0120Let", - "\u0120fin", - "\u0120mak", - "ible", - "\u0120today", - "erm", - "\u0120Al", - "ower", - "ann", - "ix", - "\u0120par", - "\u0120stud", - "\u00c3\u00b6", - "\u0120import", - "te", - "\u0120give", - "ves", - "\u0120die", - "\u0120dec", - "\u0120tell", - "\u0120\u00d0\u00ba", - "\u00d1\u0123\u00d1\u0124", - "\u0120why", - "ically", - "ict", - "red", - "\u0120bas", - "\u0120sure", - "\u0120bel", - "ating", - "\u0120tak", - "\u0120set", - "\u0120life", - "\u0120didn", - "\u00d8\u00a7", - "ob", - "und", - "ath", - "\u0120op", - "\u0120\u00d0\u00be", - "ait", - "\u0120world", - "\u0120supp", - "io", - "\u0120cour", - "\u0120\u00d0\u00b8", - "ward", - "\u00d0\u00b5\u00d0\u00bd", - "\u0120always", - "up", - "\u0120hand", - "\u0120How", - "cial", - "\u0120cons", - "\u0120\u00d1", - "\u0120ind", - "\u01204", - "\u0120As", - "\u0120fun", - "ject", - "\u0120important", - "\u0120sur", - "ew", - "ates", - "\u01205", - "\u0120di", - "\u0120made", - "\u0120ins", - "\u0120ask", - "\u0120et", - "\u0120num", - "\u0120car", - "\u0120Okay", - "\u0120sim", - "ik", - "\u0120last", - "\u0120Go", - "\u0120mus", - "\u0120rel", - "ular", - "\u00b4\u00ec", - "\u0120Well", - "pect", - "\u0120Thank", - "\u0120three", - "\u00c3\u00a3", - "\u00e3\u0125", - "\u0120inv", - "\u0120gen", - "lic", - "\u0120happen", - "\u00eb\u012c", - "ien", - "ever", - "\u00d0\u00be\u00d0\u00b2", - "\u0120str", - "\u0120All", - "\u0120inst", - "\u0120\u00e2\u0122", - "\u0120def", - "\u0120sl", - "\u0120might", - "ung", - "\u0120year", - "\u0120own", - "\u0120keep", - "body", - "der", - "\u0120\u00d1\u0124", - "\u0120\u00d0\u00b4", - "\u0120another", - "\u0120mod", - "\u0120ev", - "\u0120guys", - "\u0120able", - "\u00c3\u00a3o", - "que", - "ident", - "\u0120Yes", - "\u0120its", - "\u0120place", - "\u0120produ", - "arn", - "\u0120\u00d0\u00bc", - "\u0120rep", - "\u0120exper", - "\u0120fam", - "ities", - "ific", - "\u0120high", - "ied", - "ool", - "iew", - "\u00d0\u00b5\u00d1\u0124", - "ren", - "\u0120done", - "\u0120...", - "\u00eb\u012c\u0136", - "stem", - "\u0120Se", - "\u0120better", - "come", - "\u0120del", - "\u0120ty", - "\u0120um", - "\u0120ho", - "\u0120An", - "\u0120mon", - "ings", - "\u0120sk", - "\u0120ob", - "com", - "blem", - "ope", - "stand", - "'d", - "ments", - "\u0120ele", - "\u0120Is", - "\u0120da", - "\u0120reg", - "lease", - "ike", - "als", - "ize", - "\u00ea\u00b0", - "\u0120care", - "\u0120never", - "\u00ec\u013f\u00b4", - "ese", - "\u0120met", - "olog", - "\u0120When", - "uck", - "\u00d0\u00b5\u00d1\u0122", - "\u0120\u00c3\u00a9", - "\u0120dat", - "\u00c3\u00a7", - "\u0120exam", - "ility", - "\u0120det", - "cri", - "\u0120used", - "\u0120Do", - "\u0120trans", - "eg", - "ten", - "\u00d1\u0130", - "cus", - "\u0120second", - "\u0120best", - "\u0120hard", - "\u0120ide", - "\u0120problem", - "\u00ea\u00b3", - 
"\u0120Un", - "\u00d1\u0127", - "\u0120\u00ce", - "\u0120watch", - "\u0120Sh", - "atter", - "\u0120pret", - "\u0120der", - "\u0120course", - "\u00c5\u0141", - "ative", - "ics", - "\u0120question", - "ute", - "\u00ec\u0139", - "\u0120For", - "ather", - "\u0120col", - "iend", - "\u0120\u00ed", - "\u0120Z", - "\u0120doesn", - "arch", - "\u0120interest", - "\u0120pol", - "\u0120cor", - "ience", - "\u0120pres", - "\u0120each", - "\u0120system", - "\u0120fact", - "iel", - "ably", - "\u0120er", - "\u0120run", - "\u0120\u00ec\u013f", - "\u0120top", - "ner", - "\u0120thought", - "\u0120eas", - "ient", - "\u0120cre", - "\u00d1\u012a", - "\u0120commun", - "ye", - "ready", - "llow", - "\u0120everything", - "omm", - "\u0120med", - "\u013c\u0136", - "\u0120count", - "its", - "\u0120compl", - "hip", - "\u00d9\u0126", - "ook", - "\u0120toget", - "\u0120together", - "amp", - "\u0120game", - "\u0120already", - "\u00d0\u00b0\u00d0\u00bb", - "\u0120called", - "ale", - "\u00c5\u0124", - "\u0120My", - "\u0120understand", - "\u0120dr", - "\u0120mom", - "ited", - "\u00d0\u00be\u00d0\u00bb", - "\u0120using", - "zy", - "\u0120number", - "\u00e3\u0122\u0123", - "ced", - "\u0120cle", - "\u00d0\u00bd\u00d0\u00be", - "\u00eb\u012d\u00a4", - "ince", - "\u0120looking", - "\u0120pretty", - "\u0120prob", - "\u0120She", - "\u0120ve", - "\u0120getting", - "\u0120week", - "\u0120eff", - "uff", - "air", - "ues", - "ern", - "\u0120Q", - "oup", - "ention", - "\u0120side", - "\u00d0\u00be\u00d0\u00bc", - "\u0120form", - "\u0120bus", - "\u0120ass", - "\u0120ed", - "ason", - "ween", - "\u00e2\u0122\u00a6", - "\u0120turn", - "\u0120cur", - "\u0120coll", - "\u0120dire", - "\u0120God", - "\u012010", - "\u0120equ", - "\u0120\u00d0\u00b1", - "\u0120open", - "\u0120such", - "ird", - "\u00d0\u00b0\u00d0\u00ba", - "\u0120ear", - "\u00c4\u013b", - "gan", - "\u0120partic", - "\u0120friend", - "\u0120exp", - "\u0120ext", - "\u0120home", - "\u0120water", - "\u0120On", - "\u00d1\u0124\u00d1\u012e", - "ork", - "\u0120\u00d0\u00bf\u00d1\u0122", - "\u0120move", - "ness", - "ense", - "ho", - "\u0120char", - "co", - "ins", - "\u0120both", - "\u012019", - "\u0120gra", - "\u0120between", - "\u00e1\u00bb", - "\u0120\u00ec\u0137", - "ash", - "\u0120Re", - "ai", - "alth", - "ures", - "ember", - "\u0120av", - "\u0120ver", - "\u00c3\u00aa", - "oney", - "\u0120thank", - "\u0120maybe", - "uc", - "ime", - "\u00ea\u00b3\u0142", - "\u0120away", - "\u0120name", - "ouse", - "\u0120acc", - "\u0120music", - "\u0120change", - "\u0120pass", - "ger", - "\u0120build", - "\u0120val", - "iness", - "any", - "\u0120few", - "\u00b4\u00eb", - "ta", - "\u0120list", - "\u00c3\u00a5", - "\u0120old", - "\u0120\u00ec\u0140", - "\u0120sort", - "\u0120mem", - "\u0120ca", - "cept", - "\u0120gener", - "\u0120yeah", - "\u0120while", - "\u0120anything", - "ric", - "gram", - "\u0120ein", - "cy", - "uring", - "\u0120De", - "\u0120power", - "\u0120coming", - "\u0120word", - "\u0120--", - "\u0120belie", - "\u0120found", - "to", - "\u00d0\u00bf", - "\u0120means", - "\u0120inform", - "\u0120\u00d8", - "\u0120\u00d1\u0129", - "\u0120small", - "000", - "\u0120came", - "\u0120\u00ed\u0137", - "wh", - "\u0120working", - "\u0120example", - "\u0120pos", - "\u0120dep", - "\u00ea\u00b2", - "\u00e4\u00ba", - "ote", - "\u0120dem", - "\u00ec\u00a7", - "ts", - "\u0120var", - "aut", - "\u0120tri", - "chn", - "\u0120head", - "\u0120whole", - "\u00d7\u013b", - "ze", - "\u0120trying", - "\u0120tem", - "\u0120cou", - "ets", - "\u01206", - "\u0120fil", - "velop", - "\u0120case", - "\u00e0\u00af", - 
"\u0120probably", - "\u0120okay", - "\u0120plan", - "\u0120sit", - "\u0120school", - "\u0120Then", - "\u00b8\u00eb", - "me", - "\u0120process", - "\u0120far", - "\u0120read", - "\u0120poss", - "\u0120bre", - "\u0120sol", - "icht", - "\u0120support", - "\u0120To", - "ertain", - "\u0120started", - "\u0120cap", - "\u0120left", - "\u0120data", - "\u0120times", - "\u00d0\u00b5\u00d0\u00bb", - "\u0120wanted", - "\u00d0\u00b0\u00d0\u00bd", - "\u0120talking", - "\u0120ist", - "\u0120having", - "ump", - "\u0120contin", - "\u0120sub", - "\u0120\u00d0\u00b7", - "pr", - "\u00eb\u012d\u012a", - "ina", - "\u00c5\u00bc", - "\u0120creat", - "ode", - "\u00d7\u0137", - "\u00e6\u013a", - "!!", - "\u0120term", - "ism", - "\u00d0\u00be\u00d0\u00b4", - "\u0120Because", - "\u0120went", - "ider", - "\u0120prov", - "\u0120child", - "\u0120den", - "\u0120light", - "br", - "\u00b3\u00d0\u00be", - "oh", - "\u0120book", - "\u0120\u00d9", - "ution", - "\u0120Just", - "ene", - "\u0120four", - "\u0120vis", - "\u00ea\u00b0\u0122", - "\u0120hope", - "\u0120making", - "\u0120Le", - "\u00ec\u0137", - "\u0120opp", - "au", - "\u0120money", - "\u0120program", - "\u00c3\u00a8", - "\u0120stand", - "IN", - "\u0120sign", - "\u0120learn", - "\u00c3\u0142", - "\u0120Don", - "\u0120team", - "\u0120\u00d0\u00bd\u00d0\u00b0", - "lud", - "\u0120rest", - "ices", - "\u00e6\u013e", - "\u0120\u00d1\u0122", - "\u0120aut", - "\u0120lead", - "ational", - "de", - "gy", - "\u0120nice", - "\u0120das", - "\u0120dist", - "\u0120hum", - "\u0120One", - "\u00e6\u012a", - "\u0120comes", - "\u0120jo", - "\u0120cent", - "\u0120expl", - "\u0120mark", - "reen", - "led", - "gin", - "\u00ec\u013c\u0136", - "\u0120level", - "\u0120conf", - "ush", - "\u0120develop", - "\u0120test", - "eng", - "vious", - "ature", - "\u00d0\u00b5\u00d0\u00bc", - "ret", - "\u0120je", - "\u0120stuff", - "\u0120class", - "ows", - "\u0120\u00ea\u00b7", - "\u0120si", - "\u0120les", - "rop", - "\u00e7\u013c", - "\u0120por", - "\u0120war", - "\u00ec\u0139\u0132", - "\u0120everyone", - "\u0120ge", - "\u0120check", - "ott", - "\u0120sing", - "\u0120art", - "\u0120follow", - "\u0120201", - "\u0120Fr", - "ais", - "\u00ec\u0138", - "\u00ce\u00b1", - "\u00e5\u00b0", - "\u0120\u00c3\u0142", - "imes", - "\u0120ret", - "\u0120chang", - "\u0120pub", - "\u0120inf", - "\u0120techn", - "ada", - "ives", - "\u0120beh", - "\u00e6\u013a\u00af", - "\u0120looks", - "\u00e3\u0122\u0124", - "\u00d0\u00b7", - "\u0120Why", - "\u00e7\u013c\u0126", - "\u0120enough", - "\u0120bra", - "itch", - "\u00e4\u00bb", - "\u0120adv", - "\u00d0\u00b1", - "\u0120without", - "wer", - "meric", - "den", - "\u0120complet", - "\u0120idea", - "ters", - "ock", - "\u0120defin", - "\u0120ever", - "\u0120gl", - "\u0120once", - "\u0120bring", - "\u0120saying", - "\u0120ans", - "\u0120hear", - "nect", - "\u0120less", - "go", - "ream", - "ado", - "\u00ec\u0140", - "\u0120mind", - "ente", - "\u0120full", - "\u0120bad", - "\u0120wom", - "\u0120someone", - "\u0120du", - "\u0120won", - "\u0120contro", - "ortun", - "\u0120health", - "\u0120cho", - "\u0120Ar", - "\u0120conc", - "\u0120information", - "\u0120stop", - "att", - "ately", - "\u00e4\u00bd", - "\u0120group", - "\u0120\u00d1\u0125", - "\u0120quite", - "\u0120resp", - "ER", - "ught", - "\u00ea\u00b8", - "man", - "ized", - "\u0120Br", - "\u0120remember", - "\u0120family", - "\u0120business", - "aw", - "\u0120spec", - "\u0120au", - "\u0120Or", - "\u00c4\u0127", - "\u0120seen", - "\u0120lar", - "\u01207", - "gg", - "bers", - "\u0120dra", - "\u0120month", - "\u0120says", - "\u0120iss", - 
"\u0120live", - "\u0120line", - "\u0120moment", - "\u0120exc", - "els", - "\u0120sound", - "\u0120cool", - "\u0120loc", - "\u0120certain", - "\u0120dri", - "\u00d0\u00be\u00d1\u0124", - "ames", - "\u0120must", - "ny", - "\u00d0\u00b8\u00d1\u0124", - "\u0120kid", - "\u0120includ", - "\u00ec\u013f\u0126", - "ator", - "\u00c4\u0141", - "ha", - "ared", - "\u0120seem", - "\u00d0\u00b9", - "\u00ec\u0126", - "\u0120else", - "\u0120\u00ec\u0142", - "irl", - "\u01208", - "\u0120vo", - "\u0120questions", - "ines", - "ee", - "\u00e6\u012a\u0133", - "\u00c3\u00bcr", - "\u0120Americ", - "\u0120story", - "\u0120serv", - "vern", - "ages", - "land", - "\u0120\u00e2\u0122\u0135", - "era", - "\u0120Can", - "\u0120pop", - "ether", - "\u0120na", - "\u0120order", - "\u0120makes", - "\u0120since", - "con", - "ctor", - "\u0120though", - "\u0120product", - "\u00d0\u00bb\u00d0\u00b8", - "\u0120leg", - "\u0120meet", - "alf", - "\u00d1\u0123\u00d1\u0131", - "unch", - "iter", - "ove", - "\u00d7\u0137\u00d7", - "iet", - "\u00d0\u00b0\u00d0\u00bc", - "ital", - "\u0120super", - "ling", - "\u0120pay", - "\u0120para", - "\u0120job", - "\u0120Here", - "\u0120sw", - "ks", - "ption", - "ma", - "\u0120believe", - "\u00ac\u00eb", - "\u0120wait", - "\u00d0\u00be\u00d0\u00b9", - "\u0120unt", - "\u0120quick", - "hr", - "\u0120\u00d1\u012f", - "\u0120Pro", - "\u0120men", - "\u00e0\u00b9", - "\u0120days", - "\u0120goes", - "\u0120speak", - "\u0120At", - "ement", - "\u0120miss", - "\u0120aw", - "\u0120design", - "\u0120project", - "\u00d0\u00be\u00d1\u0122", - "ij", - "ants", - "ats", - "\u0120Chr", - "\u01209", - "\u0120cut", - "\u0120requ", - "\u0120\u00d0\u00bd\u00d0\u00b5", - "\u0120Not", - "aster", - "\u0120mill", - "\u0120particular", - "\u0120pie", - "\u0120students", - "\u0120five", - "oun", - "\u0120Ne", - "\u0120gi", - "\u0120pas", - "\u0120free", - "\u0120Sp", - "lich", - "\u0120prof", - "\u0120eng", - "\u0120prot", - "\u0120Like", - "osed", - "\u0120connect", - "app", - "\u0120\u00eb\u00a7", - "iting", - "\u0120blo", - "\u0120los", - "ists", - "\u0120experience", - "rent", - "\u0120stay", - "\u0120food", - "ton", - "ruct", - "\u0120hist", - "view", - "ining", - "most", - "ivers", - "bo", - "\u00e3\u0123\u0126", - "\u0120Tr", - "gen", - "\u0120please", - "\u0120community", - "\u0120ce", - "AN", - "no", - "\u0120body", - "\u0120hour", - "\u0120vers", - "\u00e1\u00ba", - "cer", - "\u0120\u00ea\u00b0", - "\u0120reason", - "\u0120Right", - "\u0120later", - "\u00cf\u0126", - "\u0120house", - "\u0120X", - "\u00d0\u00be\u00d0\u00bd", - "\u0120state", - "fic", - "\u00e5\u00a4", - "\u00c5\u013d", - "ield", - "\u0120pri", - "\u0120past", - "\u0120walk", - "ology", - "ering", - "anna", - "\u0120ter", - "\u0120hold", - "\u0120organ", - "ben", - "\u00ce\u00bf", - "\u00c3\u00b3n", - "\u0120effect", - "\u0120yourself", - "\u0120plus", - "aj", - "ando", - "ural", - "\u0120room", - "lect", - "\u00ea\u00b2\u012e", - "?\"", - "side", - "\u0120become", - "\u00d1\u0128", - "\u0120\u00c2", - "ood", - "\u0120const", - "\u0120night", - "utes", - "\u00d0\u00b6", - "\u0120break", - "\u0120pain", - "\u0120step", - "ired", - "\u0120nothing", - "\u0120until", - "\u00d1\u0138", - "\u00d0\u00b0\u00d0\u00b2", - "\u00d9\u012c", - "\u0120during", - "\u00ec\u00a7\u0122", - "less", - "oll", - "\u00d0\u00bd\u00d1\u012d", - "\u00ce\u00b9", - "fect", - "iver", - "\u0131\u0126", - "ither", - "ying", - "\u0120begin", - "\u00d7\u013b\u00d7", - "ivid", - "\u0120\u00c3\u00a7", - "\u0120sal", - "\u0120ta", - "\u0120pot", - "\u0120$", - "\u0120mar", - "\u0120clear", 
- "\u0120face", - "\u0120grow", - "\u0120*", - "\u0120inside", - "\u0120friends", - "\u0120leave", - "enn", - "\u0120easy", - "\u0120area", - "ality", - "oud", - "\u0120eat", - "\u00d9\u0128", - "\u0120pur", - "orn", - "\u0120saw", - "\u0120answer", - "\u0120front", - "\u0120beaut", - "\u00bc\u00eb", - "\u0120matter", - "\u0120son", - "\u0120New", - "\u0120result", - "ides", - "che", - "\u0120fut", - "ps", - "\u0120focus", - "\u0120interesting", - "\u00e5\u00a5", - "\u0120ap", - "\".", - "\u0120create", - "\u00d0\u00be\u00d1\u0123", - "\u0120press", - "ross", - "\u0120pick", - "line", - "\u0120took", - "\u0120May", - "row", - "\u0120ich", - "\u013a\u00eb", - "\u0120ref", - "\u0120mor", - "ract", - "arent", - "AR", - "\u0120exact", - "\u0120space", - "work", - "\u00d0\u00bd\u00d0\u00b8", - "\u0120bir", - "\u0120dev", - "\u00d0\u00b3", - "\u0120told", - "\u0120public", - "cially", - "\u0120view", - "\u0120Hey", - "med", - "llo", - "cc", - "\u0120fac", - "\u0120couple", - "\u0120heart", - "ler", - "\u0120ready", - "\u0120almost", - "aring", - "\u0120half", - "\u0120Me", - "avor", - "ique", - "\u0120charac", - "\u0120pract", - "ON", - "ane", - "\u0120il", - "\u00d0\u00bd\u00d0\u00b0", - "\u0120vi", - "lish", - "head", - "\u0120least", - "\u0120basically", - "ased", - "right", - "\u0120yet", - "\u0120taking", - "\u0120country", - "\u0120win", - "\u0120isn", - "\u0120possible", - "\u0120cam", - "\u0120incre", - "\u0120pat", - "\u0120wanna", - "\u0120consider", - "\u0120abs", - "\u0120within", - "\u0120human", - "\u0120thinking", - "\u0120oh", - "\u00a1\u013e", - "\u0120qui", - "ases", - "\u01200", - "itely", - "\u00e4\u00b8\u012f", - "\u0120kill", - "\u0120mil", - "\u0120invest", - "ister", - "\u0120suc", - "ional", - "elf", - "\u0120whether", - "\u0120control", - "\u0120against", - "ots", - "\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "ior", - "\u0120present", - "\u0120\u00d8\u00a7", - "\u0120watching", - "ube", - "erv", - "\u0120nicht", - "\u0120govern", - "\u0120These", - "\u0120:", - "uit", - "ugh", - "\u0120works", - "oo", - "\u0120wir", - "\u0120air", - "\u0120Te", - "\u00d0\u00b0\u00d0\u00b7", - "ision", - "where", - "\u0120tot", - "joy", - "\u00ec\u012d", - "\u0120vol", - "\u0120\u00d0\u00b5", - "\u0120close", - "\u0120Ad", - "\u00d1\u012b", - "ined", - "\u0120una", - "\u0120\u00ea\u00b7\u00b8\u00eb", - "\u00b0\u00eb", - "orry", - "\u0120bro", - "\u0120film", - "ift", - "20", - "\u0120type", - "\u0120happened", - "\u0120Am", - "\u0120girl", - "\u0120Are", - "wards", - "\u0120pour", - "\u0120color", - "elt", - "\u00d0\u00b0\u00d1\u0123", - "\u0120sense", - "lex", - "\u0120With", - "uss", - "rib", - "\u0120rese", - "\u0120norm", - "\u0120future", - "\u0120deal", - "ending", - "ey", - "\u0120x", - "ero", - "\u0120Cl", - "uk", - "\u0120whatever", - "selves", - "\u0120young", - "\u00ec\u012c", - "\u0120Mar", - "\u0120Christ", - "\u0120guess", - "\u0120perform", - "\u0120ener", - "ron", - "\u0120hit", - "\u0120wond", - "\u0120direct", - "\u0120Every", - "\u0120often", - "\u0120fa", - "\u0120along", - "\u0120click", - "\u0120Look", - "\u0120situ", - "\u0120happy", - "ead", - "\u0120ago", - "\u0120enc", - "\u0120myself", - "\u0120cover", - "\u00d0\u00be\u00d0\u00b1", - "\u0120mid", - "\u0120cost", - "\u0120ten", - "\u0120Sch", - "\u0120expect", - "\u0120wasn", - "\u0120strong", - "iful", - "\u0120opportun", - "inal", - "yle", - "\u0120share", - "\u0120true", - "\u0120appro", - "\u0120chall", - "\u0120minutes", - "\u0120chann", - "\u0120\u00eb\u0124", - "\u00ce\u00b5", - "li", - "\u0120mess", - 
"ories", - "pecially", - "\u0120wrong", - "\u0120yes", - "\u0120\u00ec\u0139", - "iron", - "\u0120allow", - "\u0120subs", - "\u0120fore", - "\u0120fight", - "\u0120social", - "\u0120cra", - "ana", - "\u0120aff", - "\u0120ess", - "\u0120ways", - "\u0120short", - "\u0120fall", - "\u0120law", - "\u0120Who", - "\u0120enjoy", - "\u0120cal", - "\u0120access", - "fe", - "\u0120non", - "\u0120across", - "ery", - "viously", - "\u0120Ex", - "ided", - "\u0120link", - "\u0120Pr", - "\u0120terms", - "aces", - "\u0120land", - "azing", - "\u012015", - "\u0120mult", - "\u0120special", - "\u00e5\u0122", - "iving", - "\u00ec\u013f\u0122", - "\u0120typ", - "\u0120ste", - "\u0120\u00c4", - "\u0120forward", - "\u00e5\u0131", - "\u0120fre", - "\u00e5\u00a5\u00bd", - "\u0120research", - "\u00e0\u00af\u012f", - "\u00d0\u00b0\u00d1\u0124", - "\u0120main", - "\u0120record", - "\u0120hu", - "\u0120definitely", - "\u0120either", - "\u0120listen", - "\u0120key", - "\u0120market", - "\u0120\u00d1\u0129\u00d1\u0124\u00d0\u00be", - "ization", - "\u0120videos", - "\u0120guy", - "\u0120fig", - "\u0120stra", - "\u0120Pl", - "ully", - "amos", - "\u0120mention", - "\u0120song", - "\u0120intern", - "ral", - "urs", - "\u0120hon", - "\u0120value", - "\u0120bar", - "cle", - "\u00d0\u00be\u00d0\u00b6", - "\u00c4\u0129", - "\u013e\u00eb", - "\u0120zu", - "\u00d0\u00b8\u00d0\u00bc", - "\u00e4\u00bd\u0142", - "\u0120single", - "\u0120auch", - "cuss", - "\u0120gets", - "\u0120sometimes", - "\u00e5\u00be", - "amb", - "mm", - "cing", - "\u0120perfect", - "\u0120Bl", - "outh", - "\u00ec\u0142", - "\u0120sci", - "par", - "\u0120red", - "\u0120post", - "\u0120mot", - "\u0120elect", - "\u0120Eu", - "itive", - "\u0120Some", - "\u0120descri", - "\u0120current", - "\u00c3\u00a9s", - "\u0120tre", - "\u0120En", - "\u0120mit", - "EN", - "\u012a\u00eb", - "ium", - "\u0120heard", - "\u0120simple", - "lar", - "\u0120everybody", - "ilar", - "\u0120needs", - "\u0120diffic", - "\u0120Good", - "ument", - "cent", - "\u0120oper", - "\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "ety", - "\u0120black", - "\u0120given", - "ones", - "\u0120wel", - "\u00e9\u0122", - "\u0120\u00ec\u0137\u0126", - "\u012030", - "AT", - "\u0120stat", - "ouch", - "\u0120Mr", - "\u00d0\u00b0\u00d1\u0122", - "\u0120sho", - "\u0120cond", - "\u00d7\u0136", - "my", - "\u0120children", - "\u0120eu", - "\u00d0\u00b5\u00d0\u00b4", - "\u00ec\u0137\u0126", - "tern", - "\u0120uh", - "\u0120har", - "\u0120prom", - "\u0120pull", - "rew", - "\u0120company", - "\u0120beautiful", - "ustom", - "\u00ed\u0137\u013a", - "\u00d0\u00ba\u00d0\u00b8", - "\u0120stre", - "\u0120amazing", - "ries", - "\u0120success", - "\u0120mach", - "not", - "\u0120discuss", - "\u0120nat", - "\u00a6\u00ac", - "\u0120une", - "\u0120difficult", - "\u0120ris", - "\u00ce\u00bd", - "\u0120camp", - "\u0120buy", - "\u00e4\u00b8\u0122", - "\u0120mag", - "po", - "\u0120Your", - "\u0120behind", - "ica", - "\u00c4\u00b1n", - "\u0120OK", - "\u0120lang", - "\u0120women", - "\u0120env", - "\u0120rece", - "\u0120channel", - "ially", - "ule", - "\u012012", - "thers", - "\u0120bott", - "\u0120report", - "ently", - "fully", - "The", - "\u0120sent", - "\u0120event", - "\u0120energy", - "lt", - "\u0120words", - "arr", - "dle", - "\u0120ahead", - "ards", - "\u00d8\u00b1", - "\u00e4\u00ba\u0128", - "\u0120tool", - "conom", - "\u00d0\u00b5\u00d1\u0123", - "\u0120exactly", - "\u0120favor", - "\u0120low", - "\u0120proper", - "\u0120\u00ec\u0140\u012a", - "\u0120!", - "\u0120relations", - "\u0120mas", - "\u0120kids", - "\u0120entire", - "ude", - 
"\u00d9\u0127", - "\u0120Where", - "\u0120ones", - "\u0120city", - "olut", - "\u0120six", - "ability", - "\u00c3\u00b6r", - "ili", - "\u0120Es", - "\u0120happens", - "ains", - "\u0120model", - "\u0120pict", - "\u0120especially", - "\u0120100", - "kt", - "\u0120soon", - "by", - "rodu", - "\u0120ann", - "\u0120subscri", - "\u0120Qu", - "\u0120avail", - "iment", - "\u0120voc", - "ka", - "\u0120200", - "aper", - "\u0120Ind", - "\u0120\u00ec\u00a7", - "hor", - "\u012f\u00b0", - "jor", - "\u00d0\u00b8\u00d0\u00bb", - "\u0120squ", - "AU", - "arning", - "\u0120\u00d0\u00b3", - "IS", - "\u0120\u00d0\u00bb", - "\u00d0\u00b5\u00d0\u00b9", - "yes", - "\u00e5\u0127", - "\u0120\u00d0\u0134", - "\u0120orig", - "\u00d0\u00be\u00d0\u00b3\u00d0\u00be", - "\u0120asked", - "ilt", - "\u00d0\u00be\u00d0\u00b3", - "\u0120continue", - "\u0120\u00ec\u013a", - "ram", - "\u0120others", - "ES", - "ohn", - "\u0120lay", - "\u0120based", - "\u0120pu", - "\u0120appe", - "\u0120lim", - "\u0120prop", - "\u0122\u00eb", - "min", - "\u0120hot", - "\u0120La", - "\u0120fast", - "\u0120protect", - "\u0120amount", - "\u0120aqu", - "\u0120fund", - "\u0120custom", - "\u0120cult", - "\u0120hands", - "\u0120haven", - "\u0120aud", - "\u0120outside", - "\u0120After", - "aps", - "\u0120anim", - "ploy", - "\u0120hat", - "\u0120First", - "\u0120treat", - "\u0120ep", - "\u0120mater", - "\u0120building", - "\u0120\u00eb\u00b0", - "\u00e5\u0132", - "\u00ec\u0126\u013e", - "za", - "ughter", - "\u0120Pe", - "ney", - "eter", - "atic", - "\u0120educ", - "\u00ea\u00b8\u00b0", - "\u0120mov", - "\u0135\u00a4", - "ama", - "ration", - "\u0120sn", - "\u00d9\u012a", - "\u0120sum", - "\u0120phot", - "\u0120\u00d0\u013f", - "\u0120.", - "\u00e6\u013e\u012b", - "\u0120finish", - "itting", - "\u00e5\u00ae", - "\u0120large", - "\u0120\u00ec\u0138", - "\u0120white", - "ara", - "\u0120mais", - "\u0120Hi", - "\u0120dam", - "\u0120\u00d8\u00a7\u00d9\u0126", - "\u0120box", - "\u0120Hello", - "\u0120sle", - "\u0120opt", - "ried", - "\u00a5\u00bc", - "\u0120activ", - "\u0120n\u00c3\u00a3o", - "\u0120Com", - "\u0120playing", - "Th", - "\u0120available", - "\u0120port", - "\u00e5\u012a", - "\u0120Ah", - "\u0120las", - "\u0120early", - "\u0120wonder", - "\u00b1\u00b0", - "\u012018", - "cul", - "\u0120function", - "\u0120morning", - "lle", - "ients", - "ux", - "\u0120cir", - "itions", - "\u0120deep", - "\u0120polit", - "yor", - "mp", - "aking", - "\u012e\u00eb", - "\u0120Man", - "\u0120million", - "\u0120/", - "\u0120individ", - "\u0120pan", - "\u0120government", - "\u0120write", - "\u0120Tod", - "ament", - "\u0120\u00cf", - "\u0120wind", - "\u0120Eng", - "chen", - "Wh", - "\u00ec\u013e", - "\u0120ident", - "\u00e3\u0123\u00a7", - "vent", - "urch", - "\u0120hy", - "\u0120ya", - "\u0120trad", - "\u0120relationship", - "\u00c3\u00ba", - "\u0120dou", - "OR", - "\u0120swe", - "\u0120neg", - "ination", - "\u0120text", - "ipp", - "\u0120fine", - "\u00c3\u00a1s", - "\u0120Dr", - "\u0120Come", - "\u0120months", - ",\"", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8", - "\u0120hours", - "\u0120pod", - "irt", - "\u0120invol", - "\u0120collect", - "\u0120auf", - "\u0120pa", - "\u0120history", - "mb", - "ify", - "\u0120?", - "\u0120below", - "asure", - "aby", - "\u0120langu", - "\u0120ant", - "\u0120comb", - "ato", - "\u0120exist", - "\u0120\u00eb\u012d", - "\u0120takes", - "\u0120character", - "aff", - "\u0120field", - "\u0120econom", - "ief", - "\u0120piece", - "\u00e5\u013e", - "\u0120reach", - "\u0120\u00ea\u00b2", - "ony", - "\u0120material", - "\u0120dig", - "\u0120phys", - 
"\u0120impro", - "\u0120similar", - "IC", - "\u0120net", - "yn", - "\u0120position", - "\u00c3\u0141", - "\u0120bene", - "read", - "\u0120learning", - "ume", - "\u0120clean", - "\u00d1\u0124\u00d0\u00be\u00d1\u0122", - "\u0120cook", - "\u0120seems", - "\u0120ol", - "\u0120US", - "\u0120Jes", - "\u0120\u00e0\u00ae", - "ential", - "iversity", - "acy", - "\u0120\u00d1\u0131", - "olutely", - "rect", - "\u0120Please", - "\u0120repres", - "\u0120touch", - "men", - "\u0120\u00d0\u00b0", - "i\u00c3\u00b3n", - "\u0120Thanks", - "\u0120ang", - "\u0120major", - "\u0120itself", - "ills", - "\",", - "ians", - "\u0120screen", - "\u0120hor", - "\u0120known", - "\u0120environ", - "\u0120final", - "\u0120figure", - "\u0120Tw", - "\u0120eyes", - "\u0120imag", - "\u0120seeing", - "\u0120hair", - "rem", - "\u0120applic", - "ends", - "put", - "\u0120news", - "\u0120completely", - "ughs", - "\u0120knew", - "ified", - "\u0120Je", - "\u0120Did", - "\u0120situation", - "\u0120flo", - "ms", - "\u0120phone", - "\u0120ball", - "do", - "\u0120parent", - "\u0120sorry", - "ury", - "\u00d0\u00b8\u00d0\u00bd", - "ips", - "\u00d0\u00b0\u00d0\u00b4", - "\u0120instead", - "\u0120huge", - "\u0120tu", - "\u0120\u00e3\u0123", - "\u0120Gr", - "\u0120detail", - "\u0120\u00d0\u0141", - "\u0120individual", - "\u0120fire", - "\u0120clos", - "\u0120wer", - "une", - "\u0120running", - "\u0120convers", - "\u0120recomm", - "\u0120como", - "\u0120somebody", - "\u0120John", - "\u0120\u00ec\u013f\u00b4", - "\u0120Our", - "ples", - "\u0120Ph", - "\u0120anal", - "\u012050", - "\u0120offer", - "\u0120<", - "itional", - "gest", - "\u0120vous", - "let", - "icy", - "\u0120feeling", - "LE", - "ros", - "\u0120third", - "\u00d0\u00be\u00d0\u00ba", - "\u0120series", - "\u0120Any", - "ised", - "old", - "\u0120draw", - "\u0120service", - "\u0120cannot", - "bal", - "\u00e3\u0123\u0128", - "\u0120living", - "\u00c4\u00b1m", - "\u0120difference", - "\u0120opportunity", - "\u0120near", - "orth", - "ken", - "\u0120local", - "\u00d8\u00aa", - "\u0120Con", - "\u0120object", - "\u0120dass", - "\u00e3\u0123\u013b", - "\u0132\u00d7", - "\u0120quickly", - "raph", - "\u0120issues", - "\u00e9\u0122\u013b", - "\u0120American", - "\u0120prep", - "ences", - "\u0120profess", - "lling", - "of", - "\u0120foot", - "bre", - "\u0120usually", - "\u0120general", - "da", - "ances", - "\u0120dest", - "\u0120occ", - "\u0120members", - "\u0120dans", - "\u0120equal", - "zt", - "\u0120becom", - "\u0120moving", - "\u0120specific", - "\u00c3\u0143a", - "\u0120fur", - "\u0120necess", - "\u0120common", - "\u0120attack", - "\u0120\u00d1\u012f\u00d1\u0124\u00d0\u00be", - "\u0120Today", - "\u0120uns", - "\u0120Gu", - "iod", - "\u0120account", - "\u0120grand", - "\u0120self", - "\u0120El", - "\u0120tast", - "\u0120content", - "\u0120cu", - "\u0126\u00eb", - "\u0120Maybe", - "\u0120Jesus", - "ores", - "port", - "\u00a9\u00b4", - "\u0120gives", - "\u0120normal", - "\u00d1\u0122\u00d1\u0125", - "\u0120impact", - "\u00c3\u00a4r", - "\u0120dies", - "\u0120lab", - "sh", - "ios", - "\u0120Pres", - "\u0120Und", - "\u0120Of", - "\u0120finally", - "\u0120doll", - "\u0120voc\u00c3\u00aa", - "ply", - "\u0120Ag", - "\u0120taken", - "\u0120ground", - "fort", - "\u0120gave", - "\u0120Inst", - "\u0120lost", - "\u0120worked", - "\u0120liter", - "\u0120issue", - "\u0120indust", - "\u0120return", - "\u0120happening", - "\u0120wants", - "\u00d0\u00b8\u00d0\u00b2", - "\u0120problems", - "\u0120Car", - "\u013f\u00bc", - "\u0120Also", - "\u0120size", - "\u0120obviously", - "\u0120Su", - "\u0120Sc", - 
"\u0120recommend", - "ources", - "astic", - "....", - "\u0120mi", - "lier", - "\u0120Even", - "cia", - "\u0120hur", - "va", - "\u0120mass", - "\u0120wouldn", - "unt", - "cks", - "\u0120felt", - "osp", - "light", - "\u00d0\u00be\u00d0\u00bb\u00d1\u012e", - "nie", - "\u0120bottom", - "\u0120\u00d0\u00b1\u00d1\u012d", - "ored", - "ison", - "\u0120grad", - "\u0120uma", - "\u0120va", - "\u0120\u00ec\u0124", - "ression", - "ulation", - "ID", - "idence", - "\u0120bur", - "\u0120gone", - "lu", - "\u00ec\u0138\u00b4\u00ec", - "\u0120redu", - "\u0120ja", - "\u00ec\u013f\u013a", - "ita", - "\u0120soft", - "\u0120\u00c3\u00a7a", - "ico", - "eral", - "\u00c3\u00b1", - "af", - "\u0120points", - "gu", - "\u0120d\u00c3\u00a9", - "apt", - "ax", - "\u0120Alright", - "\u0120camera", - "\u0120ach", - "\u0120\u00d0\u00bf\u00d0\u00be", - "\u0120sever", - "50", - "\u0120sie", - "\u00cf\u0123", - "\u0120mal", - "\u0120comput", - "\u0120middle", - "\u0120couldn", - "ming", - "\u0120\u00ec\u012d", - "\u0120His", - "\u0120games", - "\u0120introdu", - "\u0120cell", - "por", - "\u0120sleep", - "\u0120\u00eb\u00b3", - "iding", - "\u0120ou", - "\u0120deg", - "\u0120drink", - "\u0120environment", - "\u0120United", - "\u0120talked", - "\u0120choose", - "\u0120jour", - "ege", - "\u0120Min", - "\u0120inte", - "\u0120rather", - "\u0120offic", - "\u00d0\u00ba\u00d0\u00b0", - "aching", - "\u0120mentioned", - "\u0120fill", - "\u0120track", - "\u0120nie", - "\u0120ut", - "\u0120\u00d0\u00b2\u00d1\u012d", - "ibility", - "\u0120vac", - "\u0120rad", - "\u0120pack", - "\u0120send", - "\u0120Das", - "\u0120Ab", - "\u0120engine", - "\u00e3\u0123\u0139", - "\u0120compet", - "\u00c3\u00b4", - "\u0120\u00d0\u00b2\u00d1\u0123", - "\u0120door", - "\u0120longer", - "\u00e5\u00b0\u012f", - "\u0120language", - "\u0120extra", - "play", - "\u0120webs", - "umb", - "room", - "\u00e7\u013e", - "\u0120beginning", - "\u0120refer", - "AM", - "nen", - "igher", - "face", - "erc", - "\u0120forget", - "\u0120comment", - "\u00d0\u00b5\u00d0\u00ba", - "\u00d0\u00bb\u00d1\u0131", - "ror", - "\u00c5\u00bce", - "\u0120Ge", - "\u0120dark", - "\u0120anyone", - "ante", - "ges", - "\u00ec\u012c\u00b5", - "\u00d1\u0133", - "bed", - "je", - "ructure", - "\u0120prim", - "ida", - "\u00e8\u00a6", - "\u00e3\u0123\u00be", - "\u0120mix", - "\u0120starting", - "\u0120\u00ec\u013f\u00b4\u00eb", - "\u0120provide", - "action", - "\u0120mother", - "\u0120period", - "\u0120stick", - "\u0120YouT", - "\u0120technology", - "\u00ea\u00b9", - "\u0120bed", - "\u0120giving", - "\u0120explain", - "zen", - "imate", - "\u0120represent", - "load", - "\u0120However", - "\u0120lives", - "uth", - "irit", - "ogn", - "\u0120lik", - "\u0120respons", - "\u0120priv", - "\u0120tom", - "\u00c3\u00a7\u00c3\u00a3o", - "iam", - "\u0120excited", - "\u0120card", - "ground", - "\u0120\u00d7\u0136", - "\u0120sens", - "\u0120teach", - "ido", - "hod", - "\u0120epis", - "\u0120welcome", - "\u0120wall", - "\u00e4\u00b9", - "\u0120chance", - "hen", - "\u0120\u00d0\u00a1", - "\u0120\u00c4\u0133", - "\u0120simply", - "\u0120\u00d1\u0124\u00d0\u00b0\u00d0\u00ba", - "ring", - "ja", - "book", - "\u0120several", - "ste", - "\u0120created", - "\u0120\u00d0\u00be\u00d1\u0124", - "\u0120push", - "==", - "\u0120higher", - "uf", - "ource", - "oke", - "\u0120online", - "\u0120rele", - "\u0120ton", - "ensive", - "\u0120favorite", - "\u00d1\u0125\u00d0\u00b4", - "\u0120looked", - "\u0120von", - "\u00e2\u0122\u0136", - "\u0120f\u00c3\u00bcr", - "\u0120button", - "\u0120bill", - "\u0120changes", - "!\"", - "\u0120slow", - 
"ables", - "\u0120death", - "ands", - "ateg", - "\u0120themselves", - "\u00e3\u0123\u00a3", - "\u0120cop", - "\u00e3\u0123\u00ae", - "\u0120personal", - "ughing", - "\u012011", - "gar", - "ades", - "\u0120needed", - "\u0120study", - "aged", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b2", - "ino", - "\u0120disc", - "ki", - "\u0120address", - "\u00d7\u00a8", - "itten", - "esome", - "\u0120\u00d0\u00b6", - "\u00a4\u00eb", - "ura", - "\u0120mu", - "\u0120continu", - "for", - "\u0120match", - "\u00e3\u0123\u00a6", - "\u0120straight", - "\u0132\u00eb", - "ners", - "\u0120dog", - "\u0120deb", - "\u0120CO", - "\u0120os", - "ged", - "came", - "\u0120correct", - "ette", - "\u0120See", - "\u0120including", - "\u0120Euro", - "ester", - "\u0120jump", - "\u0120Which", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00ba", - "son", - "ya", - "ING", - "\u0120eine", - "osh", - "ency", - "\u0120media", - "\u0120subscribe", - "\u00e9\u0124", - "\u0120prin", - "\u0120hab", - "\u0120Per", - "\u0120Was", - "\u0120page", - "itor", - "\u0120towards", - "\u0120tried", - "enge", - "artment", - "\u0120vari", - "\u0120paper", - "\u0120picture", - "\u0120version", - "\u0120brought", - "ware", - "\u0120States", - "\u0120sich", - "ledge", - "\u0120percent", - "\u0120god", - "ec", - "\u0120Comm", - "\u0120decided", - "\u0120select", - "\u00ed\u0137\u013e", - ").", - "urity", - "\u0120further", - "\u0120comments", - "lement", - "\u0120dream", - "\u0120center", - "mi", - "\u0120cas", - "\u0120woman", - "\u0120road", - "\u0120fail", - "\u0120became", - "lus", - "ilities", - "\u00e3\u0123\u00af", - "\u0120Co", - "\u0120manage", - "\u0120recogn", - "\u0120action", - "\u0120benef", - "\u0120earlier", - "\u00d7\u013e", - "\u0120speed", - "\u0120ment", - "\u0120soci", - "\u0120shoot", - "ui", - "\u0120\u00c3\u00a4", - "\u0120apply", - "vo", - "xim", - "\u0120cause", - "\u0120surpr", - "\u0120haben", - "DI", - "\u0120father", - "\u0120Next", - "\u0120YouTube", - "\u0120code", - "\u0120role", - "gress", - "\u0120green", - "ett", - "\u0120built", - "\u0120flow", - "\u0120base", - "\u0120training", - "\u0120round", - "\u0120Will", - "\u0120path", - "\u0120Ro", - "\u0120interested", - "\u00ec\u0138\u00b4", - "\u0120respect", - "\u0120changed", - "ission", - "\u0120student", - "ograph", - "\u0120approach", - "\u0120shows", - "\u00e5\u00b0\u00b1", - "\u0120tar", - "\u0120crit", - "\u0120glo", - "\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120dead", - "\u0120President", - "\u0120thous", - "\u0120bal", - "ster", - "ex", - "\u0120absolutely", - "\u0120mic", - "\u0120practice", - "\u0120quality", - "\u0120lower", - "ogle", - "\u0120separ", - "ball", - "medi", - "\u0120review", - "\u0120App", - "\u0120ok", - "\u00e2\u0122\u012d", - "\u0120experien", - "\u0120concern", - "entially", - "more", - "\u0120Jo", - "apan", - "\u0120Ich", - "istic", - "\u0120fair", - "\u0120website", - "ires", - "\u0120By", - "\u0120travel", - "\u0120risk", - "\u0120mir", - "\u0120board", - "\u0120sen", - "\u0120parents", - "\u0120Wow", - "\u0120feed", - "\u0120save", - "\u0120serious", - "\u0120init", - "EL", - "undred", - "AS", - "\u0120van", - "orrow", - "\u0120worth", - "\u0120search", - "\u012016", - "\u0120parts", - "\u00d1\u0123\u00d1\u0124\u00d1\u012e", - "\u0120compan", - "\u0120movie", - "\u0120method", - "\u0120ill", - "\u0120wish", - "dy", - "\u0120item", - "\u0120minus", - "anger", - "\u0120voice", - "\u0120skin", - "\u0120areas", - "\u0120eight", - "\u0120obs", - "\u0120,", - "\u00d0\u00b0\u00d0\u00b9", - "\u0120oil", - "\u0120cy", - 
"\u0120baby", - "sy", - "\u0120employ", - "\u0120Ke", - "\u0120places", - "\u0120fix", - "\u0120est\u00c3\u00a1", - "\u00e3\u0123\u00a8", - "ived", - "\u0120lots", - "\u0120season", - "unk", - "alt", - "\u0120table", - "\u0120\u00d0\u00a2", - "\u00c3\u00a2", - "\u0120attention", - "\u00e3\u0123\u00aa", - "\u0120Her", - "\u0120age", - "\u0120pra", - "back", - "cil", - "\u0120network", - "rit", - "\u0120doc", - "\u0120aren", - "igen", - "\u0120\u00eb\u0126", - "\u00d8\u00af", - "ender", - "\u0120total", - "\u0120price", - "\u0120crazy", - "\u00ec\u013c", - "iqu", - "though", - "You", - "\u00d9\u0129", - "\u00e3\u0124\u0135", - "\u00cf\u0127", - "\u0120sat", - "\u0120bi", - "\u0120Die", - "\u0120sha", - "\u0120thanks", - "uh", - "\u0120stage", - "\u00d0\u00b0\u00d0\u00b6", - "\u0120Fl", - "\u0120leav", - "\u0120boy", - "\u0120af", - "\u00c3\u00b6n", - "\u0120Get", - "\u0120accept", - "\u0120enter", - "\u0120tur", - "\u0120si\u00c4\u013b", - "\u0120honest", - "\u00e3\u0122\u012e", - "\u0120sam", - "\u0120repl", - "ging", - "\u0120development", - "\u0120Act", - "ora", - "\u00e3\u0122\u012f", - "\u00e4\u00be", - "\u0120knows", - "\u0120image", - "\u0120Lord", - "\u00d0\u00b8\u00d1\u0124\u00d1\u012e", - "\u0120weeks", - "\u0120sex", - "\u0136\u00eb", - "\u0120hundred", - "\u0120sounds", - "\u0120learned", - "\u0120bud", - "\u0120\u00d1\u0123\u00d1\u0124", - "\u0120incred", - "\u00e2\u013b", - "\u0120nos", - "\u0120drop", - "\u0120ben", - "\u0120\u00d0\u013a", - "\u0120safe", - "ata", - "\u0120fuck", - "soci", - "\u0120dan", - "\u0120cross", - "10", - "mo", - "vert", - "\u012017", - "zie", - "\u00e5\u0137", - "\u0120dom", - "\u0120Bo", - "\u0120setting", - "\u0120involved", - "arily", - "\u0120sind", - "\u0120sus", - "\u0120worry", - "eth", - "\u00ea\u00b9\u012e", - "\u0120sun", - "\u0120hier", - "\u0120certainly", - "oul", - "orts", - "\u0120Er", - "\u0120Um", - "\u0120caus", - "\u0120natural", - "\u0120\u00c3\u00bc", - "\u0120cry", - "\u0120Sec", - "\u0120som", - "\u00e6\u00b2", - "\u0120education", - "\u00d0\u00b0\u00d0\u00b5\u00d1\u0124", - "\u0120multip", - "\u0120alone", - "\u0120eye", - "\u0120rate", - "\u0120Europe", - "\u00e8\u00bf", - "mon", - "\u0120fit", - "izing", - "pped", - "\u0120pressure", - "the", - "\u00d0\u00b8\u00d1\u0123", - "ites", - "\u0120Af", - "reci", - "attle", - "\u0120services", - "\u0120Google", - "\u00e9\u0123", - "\u0120cases", - "\u0120drive", - "\u0120challeng", - "uz", - "\u0120Mo", - "\u00ec\u013e\u00bc\u00eb", - "val", - "\u00e5\u0122\u012d", - "\u0120fol", - "\u0120\u00ec\u00a2", - "ffic", - "\u0120ra", - "\u0120sin", - "\u0120blue", - "\u0120affect", - "\u0120mis", - "\u0120shot", - "\u0120\u00d0\u00be\u00d0\u00b1", - "asing", - "\u0120signific", - "\u0120Che", - "\u0120\u00ea\u00b3", - "\u0120positive", - "\u00ec\u00a3", - "\u0120wie", - "\u012040", - "ording", - "\u0120From", - "\u00ea\u00b5", - "\u0120brand", - "\u0120trust", - "\u0120ple", - "\u0120communic", - "\u0120weight", - "\u0120asking", - "\u0120tax", - "\u0120Japan", - "\u00e3\u0123\u0141", - "\u0120\u00ed\u0137\u013a", - "ops", - "\u00cf\u0124", - "\u0120putting", - "\u0120roll", - "\u0120America", - "reg", - "\u0140\u00d7", - "atures", - "ension", - "\u0120Somet", - "\u0120original", - "ping", - "\u0120\u00c5\u0141", - "\u0120products", - "\u00e3\u0125\u00bc", - "\u0120contact", - "olution", - "\u0120goal", - "\u0120pow", - "\u0120performance", - "\u0120blood", - "ators", - "\u0120Mich", - "\u0120temper", - "\u0120Dan", - "\u0120sugg", - "\u00d1\u0124\u00d0\u00b8", - "\u0120imm", - 
"\u0120office", - "\u0120arri", - "\u0120comfort", - "\u0120\u00d0\u0136", - "\u0120suggest", - "\u0120plat", - "\u0124\u013a", - "19", - "\u0120om", - "\u0120seven", - "\u0120Cent", - "ille", - "\u0120concept", - "\u0120bag", - "\u00c3\u00bcn", - "ively", - "\u0120div", - "mos", - "\u00e6\u012b", - "\u0120feels", - "\u0120ir", - "akes", - "ley", - "\u0120particip", - "\u0120\u00d0\u013c", - "fl", - "just", - "\u0120sil", - "\u0120Pa", - "AL", - "\u0120gotta", - "\u0120fan", - "\u0120challenge", - "\u0120companies", - "\u0120People", - "", - "\u0120heroes", - "\u0120Boston", - "\u0120dependent", - "\u0120motivation", - "flix", - "\u0120seam", - "\u00d0\u00ba\u00d0\u00b8\u00d0\u00b5", - "\u0120drain", - "oded", - "\u0120guilty", - "\u0120Jenn", - "ingen", - "\u0120granted", - "\u0120Kelly", - "\u0120Sav", - "\u0120Uncle", - "\u0120Honestly", - "ELI", - "\u0120navigate", - "\u0120blessed", - "core", - "\u0120earning", - "\u0120signals", - "\u0120disk", - "ials", - "\u0120ages", - "\u00e6\u0127", - "\u0120particle", - "\u0120\u00d1\u0129\u00d0\u00b5\u00d1\u0122", - "\u0120cann", - "\u0120tier", - "\u0120statements", - "\u00ea\u00b3\u0142\u00ec\u013c\u0136", - "\u0120\u00eb\u0137\u012e\u00eb\u00ac\u00b8\u00ec\u0139\u0132", - "\u0120Cho", - "\u0120polar", - "an\u00c3\u00a7", - "\u0120Kenn", - "\u0120Ni", - "\u0120Fight", - "organ", - "\u00e9\u0137", - "\u0120Cha", - "\u0120S\u00c3\u0143", - "\u00e3\u0125\u00aa", - "\u0120slic", - "\u0120certific", - "\u0120template", - "\u0120Federal", - "\u0120consideration", - "\u0120explo", - "\u0120Main", - "\u0120NE", - "\u0120alongside", - "\u0120dressed", - "\u0120Point", - "\u0120environments", - "\u0120pr\u00c3\u00b3xim", - "\u0120daar", - "\u0120prompt", - "\u0120pursue", - "\u0120entertainment", - "\u0120throat", - "\u0120problema", - "\u0120mart", - "\u00ec\u00bc", - "\u0120provider", - "\u00d8\u012e", - "\u0120\u00d7\u0139", - "inte", - "making", - "\u0120stroke", - "\u0120tissue", - "Un", - "\u0120precious", - "\u0120Arts", - "inking", - "\u0120\u00d0\u0140\u00d0\u00bd", - "\u0120\u00d0\u00b8\u00d1\u0123", - "nah", - "\u0120\u00d0\u0137\u00d1\u0123\u00d0\u00bb\u00d0\u00b8", - "\u0120corners", - "\u0120tricky", - "inch", - "lijk", - "\u0120pressing", - "level", - "ANG", - "\u0120radiation", - "\u00ec\u0126\u0142", - "\u0120confront", - "\u0120vet", - "\u0120representative", - "\u0120propag", - "\u0120crap", - "\u0120Dec", - "\u0120ramp", - "\u00d0\u00b5\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d1\u012e", - "u\u00c3\u00a9s", - "essen", - "cription", - "\u0120bills", - "\u0120Matthew", - "\u0120anime", - "\u00e1\u00ba\u00a5t", - "\u0120lowest", - "has", - "screen", - "ograp", - "\u00d0\u00b0\u00d0\u00bb\u00d0\u00be", - "inton", - "\u0120Jah", - "\u00e8\u0122\u0127", - "it\u00c3\u0142", - "\u0120kay", - "\u0120rotation", - "\u0120Were", - "abei", - "\u0120trials", - "\u0120lever", - "ighty", - "\u0120spoon", - "\u0120hunt", - "cling", - "\u0120dism", - "\u0120\u00d0\u00b1\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d1\u012a", - "\u0120assault", - "\u0120\u00ed\u013a\u0137", - "\u0120weekly", - "\u0120mismo", - "\u0120genetic", - "ulpt", - "\u0120Student", - "\u0120realistic", - "\u0120authentic", - "\u00e6\u012b\u0135", - "asta", - "\u0120arrested", - "\u0120guidelines", - "\u0120\u00d7\u013e\u00d7\u0132", - "\u0120\u00d0\u00b4\u00d0\u00b0\u00d0\u00b2", - "\u0120Coming", - "f\u00c3\u00bcr", - "\u0120requests", - "\u0125\u0132", - "\u0120analyze", - "\u0120interess", - "\u0120halt", - "\u0120Oper", - "onom", - "\u0120duck", - "\u0120withd", - "ser", - 
"\u0120\u00cf\u012e", - "\u0120History", - "\u0120youtube", - "\u00e3\u0124\u012f", - "\u0120saber", - "walk", - "font", - "\u0120overview", - "39", - "\u00c3\u00bcy", - "etti", - "\u0120frozen", - "\u0120flesh", - "\u00c4\u0141i", - "\u0120PM", - "\u0120\u00ec\u013b\u0122", - "\u00e9\u00a2", - "\u00d1\u0128\u00d0\u00b8\u00d0\u00b8", - "\u0120\u00ea\u00b8\u00b0\u00eb", - "\u00ed\u0123\u00ac", - "\u0120prose", - "oooo", - "rates", - "WS", - "\u0120automatic", - "\u0120collecting", - "\u00c5\u0133", - "\u0120neighbors", - "\u00c2\u00bb.", - "\u0120Expl", - "\u0120circul", - "cover", - "weg", - "\u0120sticks", - "\u0120eller", - "\u0120www", - "\u0120dorm", - "\u0120Exper", - "\u0120statistics", - "\u0120emails", - "\u0120grave", - "imiz", - "HS", - "\u0120uit", - ",'", - "\u0120laser", - "\u00e8\u012b", - "\u0120\u00d1\u0124\u00d0\u00b5\u00d0\u00bc", - "\u00d1\u012d\u00d1\u012a", - "\u00d1\u012b\u00d1\u0133", - "\u0120genau", - "\u0120tienen", - "\u0120meditation", - "\u0120Organ", - "\u0120estimate", - "\u0120\u00eb\u00ac\u00b4\u00ec", - "lets", - "\u0120n\u00c3\u0142y", - "\u0120mindset", - "\u0120reson", - "\u0120m\u00c3\u00a9s", - "\u0120numerous", - "\u0120vielleicht", - "\u0120Third", - "uous", - "\u0120Dead", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00b4", - "HN", - "\u0120racing", - "\u0120agents", - "\u0120Ut", - "\u0120tear", - "\u0120HP", - "\u0120chemistry", - "\u0120survival", - "\u00e6\u0138\u00b0", - "\u0120convinced", - "\u0120;", - "\u0120regulations", - "\u0120ES", - "\u00e5\u0134\u012e", - "300", - "\u0120ense", - "\u0120\u00ec\u00b5", - "\u0120dict", - "GA", - "\u0120ah\u00c3\u0143", - "\u00e5\u012d\u0137", - "\u0120tej", - "\u0120\u00d0\u00be\u00d1\u0123\u00d1\u0124", - "\u0120Elect", - "\u0120intellectual", - "\u0120bias", - "\u0120burden", - "\u00e7\u0124\u00b9", - "\u0120\u00ec\u0138\u00b4\u00eb\u0138\u00bb", - "\u0120cheer", - "\u0120soph", - "\u0120portfolio", - "uba", - "\u0120estos", - "TV", - "For", - "\u0120ash", - "\u0120kommer", - "\u0120collective", - "\u0120wrest", - "\u0120Jetzt", - "\u0120Wat", - "reich", - "\u0120primer", - "active", - "\u0120mie", - "icked", - "\u0120hunting", - "\u0120testim", - "\u0120compassion", - "\u0120\u00d8\u00b1", - "\u0120brut", - "\u0120salad", - "\u00d0\u00be\u00d0\u00b1\u00d1\u012b\u00d0\u00b5", - "\u0120solving", - "\u0120floating", - "\u00e7\u00b7", - "\u0120attractive", - "\u00d9\u012a\u00d9\u0126", - "\u0120perd", - "iffer", - "\u0120sculpt", - "hhh", - "\u0120Week", - "\u0120enthus", - "\u0120nad", - "\u0120merch", - "\u0120\u00ed\u013b\u0137", - "\u0120mile", - "\u00e5\u00a5\u00bd\u00e4\u00ba\u0128", - "\u0120\u00ce\u00b8", - "\u0120\u00eb\u0124\u013a\u00eb", - "\u00e9\u0129\u012f", - "38", - "\u0120chains", - "\u0120Almost", - "\u0120tickets", - "rin", - "\u0120CC", - "\u0120distributed", - "abetes", - "\u0120temperatures", - "\u0120gained", - "\u0120flexibility", - "\u0120screaming", - "\u0120abroad", - "uno", - "\u0120entrepreneurs", - "\u0120Network", - "\u0120Canadian", - "\u0120prev", - "\u0120s\u00c3\u00b6", - "\u0120\u00d1\u0124\u00d0\u00b5\u00d0\u00b1\u00d1\u0131", - "\u0120Poke", - "\u0120Pod", - "\u0120Turkey", - "\u00e7\u0131\u00be\u00e5\u013e\u00a8", - "\u0120abstract", - "\u0120snake", - "\u0120Amy", - "\u0120\u00eb\u012c\u0132\u00eb\u0124\u012e", - "\u0120brave", - "\u0120\u00ec\u0140\u012a\u00ec\u0138\u00b4\u00ec\u013c\u0136", - "\u0120Kal", - "\u01202007", - "\u00c3\u00a1rio", - "\u0120marked", - "gines", - "\u0120alloc", - "ONG", - "\u0120scientist", - "\u0120esca", - "\u0120racism", - "\u00d7\u0133\u00d7", 
- "\u0120Sams", - "\u0120Penn", - "\u0120loads", - "\u0120\u00e0\u00ae\u00a8", - "\u00c3\u00bcber", - "Me", - "ix\u00c3\u00b2", - "\u0120per\u00c3\u00b2", - "anne", - "\u0120expressed", - "\u00d0\u00bc\u00d0\u00b5\u00d1\u0122", - "\u0120moet", - "\u0120returning", - "nia", - "\u0120expon", - "Pro", - "\u0120loyal", - "ML", - "\u0120lamp", - "\u0120shy", - "\u0120composition", - "\u0120Ly", - "\u0120magnetic", - "\u0120premier", - "\u0120measured", - "\u0120summary", - "\u0120attacked", - "\u0120finishing", - "\u00d0\u0139", - "\u00e7\u00a5", - "\u0120sits", - "\u0120hydrogen", - "\u0120mai", - "\u0120Deutsch", - "as\u00c4\u00b1", - "\u0120obtain", - "vie", - "\u0120soit", - "\u0120\u00eb\u00b0\u0136", - "\u0120lane", - "\u0120consegu", - "\u00d0\u00b2\u00d0\u00be", - "\u0120ease", - "akin", - "\u0120Fa", - "\u0120untuk", - "\u0120burst", - "\u0120cum", - "al\u00c4\u00b1m", - "\u00c3\u00bablic", - "idi", - "\u0120Royal", - "\u0120Kon", - "\u0120commonly", - "\u0120removing", - "\u0120jur", - "ilib", - "\u0120anch", - "\u00ed\u0138\u012b", - "\u00c6\u00b0\u00e1\u00bb\u00a3", - "\u0120\u00d0\u013e\u00d1\u012d", - "\u0120Anth", - "\u0120S\u00c3\u00a5", - "\u0120interrupt", - "\u0120stere", - "\u0120OS", - "onym", - "tery", - "\u0120Maria", - "\u00ea\u00b2\u0125", - "\u0120exploring", - "\u0120transparent", - "\u0120fate", - "\u0120Jung", - "\u0120grup", - "\u0120darker", - "\u0120Doug", - "\u0120mane", - "\u00e6\u0136\u00be", - "\u00e1\u00ba\u00a1i", - "dri", - "look", - "\u0120Design", - "\u0120tutaj", - "\u0120horizontal", - "reon", - "orte", - "\u0120Correct", - "\u0120Steven", - "\u0120vine", - "02", - "i\u00c4\u0129", - "\u0120siempre", - "\u0120Key", - "\u00e5\u0125\u0131", - "\u0120Games", - "\u0120naar", - "\u0120shocked", - "elve", - "\u0120Rose", - "\u00ec\u012d\u00ac", - "\u0120stopping", - "ohl", - "\u0120Mix", - "\u0120suffered", - "\u0120sigma", - "\u0120weakness", - "\u0120Ow", - "\u00e0\u00b8\u00b5\u00e0\u00b9\u012a", - "IF", - "\u0120\u00e0\u00ae\u0127", - "aded", - "\u0120Netflix", - "anes", - "\u0120remained", - "iry", - "\u0120rip", - "ellt", - "\u0120silent", - "\u0120proven", - "\u0120toxic", - "\u0120alumin", - "\u0120multipl", - "aland", - "\u012034", - "06", - "\u0120Bru", - "\u0120\u00ec\u0142\u0137\u00eb\u00a7\u0132", - "Just", - "boy", - "\u0120shoe", - "\u0120creature", - "\u0120headed", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00ba", - "\u00e6\u00b1", - "\u0120essence", - "\u0120remarkable", - "\u0120n\u00c3\u00bamer", - "\u0120drew", - "\u0120puzzle", - "\u0120Library", - "\u0120Fu", - "ashes", - "kk", - "\u0120Ist", - "\u00a6\u00b0", - "\u0120Bry", - "\u0120ceremony", - "\u0120\u00e0\u00ae\u0130", - "\u0120cri", - "equ", - "\u00e3\u0124\u00a2", - "\u0120prize", - "\u0120dimensions", - "ogram", - "\u0120leather", - "\u0120populations", - "uum", - "\u0120vegan", - "\u00d1\u0131\u00d0\u00b4", - "\u0120c\u00c3\u00b3mo", - "\u00e5\u0126", - "\u0120strip", - "\u00e5\u00a3", - "\u0120vacation", - "\u0127\u0137", - "\u0120meals", - "ilipp", - "\u0120ents", - "aram", - "richt", - "\u0120grain", - "\u0120Spain", - "\u0120cheek", - "\u0120Aff", - "ION", - "\u0120Bring", - "\u012038", - "ielen", - "ulu", - "\u0120\u00d0\u00b1\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d1\u012a\u00d0\u00b5", - "\u0120announcement", - "\u0120\u00d1\u0124\u00d1\u0125\u00d1\u0124", - "\u0120Prophet", - "ardo", - "37", - "\u0120woke", - "\u0120translation", - "\u0120NOT", - "\u0120CL", - "\u0120d\u00c3\u00bc\u00c5\u0141", - "\u00d1\u0128\u00d1\u0138", - "acer", - "\u0120Loc", - "\u0120perception", - 
"NO", - "\u0120diesen", - "Look", - "heart", - "aved", - "\u0120boundary", - "\u0120flows", - "\u00d1\u0133\u00d0\u00bc", - "\u0120arguments", - "\u0120elections", - "\u00c4\u00b1s", - "\u0120heck", - "\u0120suitable", - "\u0120fiber", - "\u0120Stra", - "xy", - "\u0120Hum", - "\u0120monthly", - "uper", - "\u0120golf", - "\u0120lately", - "\u0120Gard", - "\u0120Ren", - "\u0120Ast", - "\u0120Fant", - "\u00d0\u00b0\u00d1\u0123\u00d1\u0123", - "\u0120obser", - "\u00eb\u00a1\u013e", - "\u0120easiest", - "\u012f\u0136\u00eb", - "\u0120websites", - "pol", - "\u0120cocon", - "\u0120\u00e0\u00ae\u0129", - "\u0120Veg", - "\u0120walks", - "\u0120intro", - "\u0120directed", - "\u0120Anna", - "\u0120\u00eb\u0135\u00a4\u00ec\u0138\u00b4", - "\u0120Eastern", - "\u0120Saint", - "\u0120Bow", - "\u0120roast", - "\u0120URL", - "\u0120jeden", - "uras", - "aja", - "\u0120semi", - "\u0120rapidly", - "\u0120targets", - "\u0120Control", - "\u0120bah", - "\u0120reflection", - "\u0120creativity", - "holders", - "\u0120\u00ec\u013a\u00ac\u00eb", - "\u0120amongst", - "\u0120feeding", - "\u00d1\u012f\u00d1\u0124\u00d0\u00be\u00d0\u00bc\u00d1\u0125", - "\u0120\u00d0\u00b2\u00d0\u00b8\u00d0\u00b4\u00d0\u00b5", - "\u0120\u00eb\u00a7\u012e\u00eb\u0135\u00a4", - "\u0120Smart", - "\u0120reliable", - "\u0120vezes", - "\u0120\u00d7\u00a8", - "chuckles", - "azione", - "\u0120Williams", - "\u0120a\u00c3\u00a7", - "\u0120slee", - "\u00d0\u00b5\u00d1\u012b", - "\u0120timeline", - "\u0120thorough", - "\u00e1\u00bb\u012f", - "\u0120Ot", - "\u00e1\u00ba\u00a1n", - "\u0120imagination", - "\u0120mechanics", - "rist", - "\u0120claimed", - "\u00cf\u0126\u00ce\u00b7", - "\u00c3\u00aate", - "\u0120Hurry", - "\u0120iPad", - "\u0120constru", - "\u0120Cla", - "\u0120Als", - "\u00e4\u00bc\u013c", - "utz", - "\u0120cultures", - "\u0120\u00ec\u0138\u00b4\u00eb\u0138\u00bb\u00ea\u00b2\u012e", - "\u0120belongs", - "\u0120yer", - "\u0120Doesn", - "\u0120geomet", - "\u0120bid", - "\u0120foam", - "\u0120hob", - "\u0120Britain", - "\u0120substance", - "\u0120anniversary", - "\u0120\u00eb\u0126\u012a", - "\u0120noted", - "\u0120governor", - "\u0120stocks", - "31", - "\u0120diye", - "\u00ec\u012c\u00a4\u00eb", - "\u0120reb", - "zel", - "\u0120multiply", - "\u0120operator", - "\u0126\u00a4\u00ec\u013c\u0136", - "\u0120waters", - "\u0120d\u00c3\u00a4r", - "\u0120unser", - "\u0120Elizabeth", - "\u00e9\u00ab\u013a", - "\u0120increasingly", - "\u0120Gro", - "\u0120engines", - "irs", - "\u00d8\u00ab", - "\u0120treasure", - "PC", - "inction", - "iri", - "\u0120accum", - "\u0120variation", - "\u0120pom", - "\u0120titles", - "\u0120Fest", - "\u00c3\u00b3s", - "\u0120elder", - "nym", - "run", - "\u00d1\u0131\u00d0\u00b2", - "\u0120innovative", - "\u0120nombre", - "\u0120coinc", - "\u0120franch", - "\u0120entonces", - "\u0120nichts", - "\u0120exclusive", - "\u0120Cheers", - "\u0120Bi", - "uje", - "\u00e6\u0143\u00a1", - "\u0120pok", - "\u0120Prem", - "\u0120rocket", - "ELIPE", - "\u0120hospitals", - "rium", - "\u0120juste", - "\u0120hammer", - "\u0120quantum", - "\u0120responses", - "lly", - "endi", - "\u0120actively", - "\u0120fridge", - "iate", - "long", - "\u0120quem", - "\u0120deaths", - "\u0120superior", - "cken", - "\u00ec\u013f\u00b4\u00ec\u0139\u0132", - "ktop", - "\u0120gathered", - "\u00a3\u00a8", - "\u0120dazu", - "\u0120recipes", - "\u0120buzz", - "cen", - "\u0120anytime", - "onsense", - "\u0120circles", - "\u0120solved", - "\u0120\u00ec\u012d\u0142", - "\u0120coronavirus", - "\u0120Luke", - "\u0120bubb", - "\u0120contempor", - "rzy", - "\u0120Jane", 
- "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00bc", - "\u0120screws", - "\u0120hybrid", - "\u0120casual", - "\u0120selbst", - "being", - "\u0120\u00c4\u0132", - "\u0120Columb", - "\u0120\u00d1\u0127\u00d0\u00be\u00d1\u0129", - "\u0120bucket", - "\u0120evaluate", - "\u0120idol", - "\u0120reputation", - "\u0120\u00ec\u0128\u012e\u00eb", - "\u00d9\u012a\u00d8\u00b1", - "\u0120hecho", - "\u0120poem", - "\u0120subjects", - "plant", - "\u0120Beh", - "\u0120Speaking", - "\u0120batteries", - "\u0120followers", - "\u00c3\u00b6l", - "\u0120gently", - "\u0120sixt", - "\u0120parameter", - "\u0120ikke", - "\u0120Tour", - "\u0120DJ", - "otte", - "\u0120Jahren", - "\u0120preparation", - "\u0120\u00d0\u00b4\u00d1\u0125\u00d0\u00bc", - "\u0120800", - "cop", - "iking", - "\u0120\u00eb\u00ac\u00b8", - "\u0120\u00d0\u00bd\u00d1\u0125", - "\u0120\u00d0\u00bb\u00d0\u00b5\u00d1\u0124", - "\u00e5\u0132\u012e", - "\u0120Ide", - "\u0120\u00ec\u00a1\u00b0\u00ea\u00b8\u012a", - "\u0120laughter", - "\u0120molecules", - "\u0120Rest", - "\u0120observed", - "dzie", - "\u0120advertising", - "erto", - "\u0120moins", - "\u0120MIT", - "\u0120excit", - "\u0120tum", - "\u0120tyl", - "\u0120invested", - "\u0120pharm", - "\u0120unexpected", - "\u0120phi", - "otype", - "weise", - "\u0120ge\u00c3\u00a7", - "jourd", - "\u0120horses", - "n\u00c4\u0127", - "=\"", - "\u0120SM", - "\u0120fib", - "\u0120clips", - "\u00e7\u0137\u00b6", - "\u00e5\u00a6\u0124\u00e6\u0140\u013e", - "\u0120regime", - "\u0120rotate", - "rou", - "nik", - "\u0120armor", - "\u00f0\u0141\u013a", - "\u00d0\u00b5\u00d1\u0122\u00d0\u00b0", - "\u00e5\u00ba\u00a6", - "\u0120Och", - "\u0120richtig", - "\u00c3\u00bczel", - "aneously", - "mek", - "\u00e9\u012e\u00af", - "\u0120Xiao", - "\u0120existed", - "worth", - "\u00e3\u0123\u00a3\u00e3\u0123\u00a8", - "\u0120naught", - "\u0120hei\u00c3\u0141t", - "\u0120Bal", - "\u0120resid", - "ivot", - "omatic", - "\u0120hired", - "\u0120gradually", - "\u0120onions", - "\u0120compat", - "\u0120intim", - "\u0120jew", - "\u0120contribution", - "\u0120Ire", - "acji", - "\u0120slice", - "\u0120immun", - "\u0120Rus", - "\u0120grows", - "\u0120Similarly", - "\u0120hardest", - "\u0120struck", - "\u0120measurement", - "...]", - "they", - "\u0120\u00ec\u0142\u0122\u00eb", - "\u0120sneak", - "\u0120applies", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00bc", - "\u00e6\u0135", - "\u00d7\u0133\u00d7\u00a8", - "\u0120\u00d0\u00a7\u00d1\u0124\u00d0\u00be", - "\u0120outro", - "\u0120innocent", - "\u0120mog", - "\u0120Samsung", - "\u0120mercy", - "\u0120handling", - "\u0120intervention", - "idays", - "got", - "\u0120curric", - "\u0120boundaries", - "\u0120confusing", - "\u013f\u00bc\u00eb\u012c\u0136", - "\u00e6\u0129", - "\u0120stitches", - "\u00c3\u0143vel", - "\u0120tunnel", - "it\u00c3\u00a4", - "\u0120gost", - "imy", - "\u0120czas", - "\u0120m\u00c3\u00a9", - "\u0120catal", - "\u0120Simon", - "\u0120LIAM", - "mic", - "\u0120\u00d0\u00a4", - "\u0120eyel", - "isas", - "\u0120CPU", - "\u0120Dou", - "\u0120n\u00c3\u00a4ch", - "\u0120infinity", - "\u0120rif", - "\u0120Peace", - "\u0120Cu", - "\u0120minimal", - "\u0120listened", - "\u0120pole", - "halb", - "\u0120loaded", - "\u0120steady", - "\u0120Besides", - "\u00c3\u00aam", - "\u0120lap", - "\u0120coop", - "\u0120friendship", - "world", - "\u0120geh", - "\u0120tylko", - "\u0120Laura", - "\u0120surrounded", - "\u0120Event", - "\u0120chap", - "\u0120Wonder", - "break", - "\u0120drove", - "\u0120broader", - "\u0120chi", - "Fi", - "\u0120gehen", - "\u0120western", - "\u0120intelligent", - "\u0120persist", 
- "\u0120founded", - "\u00e3\u0123\u0135\u00e3\u0123\u00a8", - "\u0120historic", - "\u0120fr\u00c3\u00a5", - "cks\u00c3\u00a5", - "\u0120handy", - "\u0120symp", - "\u0120rows", - "\u0120nutri", - "bur", - "\u0120Leon", - "\u0120sistema", - "\u0120extensive", - "\u0120\u00d1\u0125\u00d0\u00b2", - "\u00ed\u0131", - "\u0120nights", - "\u0120c\u00c3\u00a1c", - "\u0120counting", - "\u0120Must", - "allow", - "\u00d0\u00b5\u00d1\u0123\u00d1\u0123", - "Mom", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b4\u00d0\u00be", - "\u0120barrel", - "\u00e3\u0125\u0140", - "ARD", - "\u0120installation", - "\u0120insect", - "\u0120\u00eb\u0127\u00b8\u00eb", - "uj\u00c4\u0127", - "\u0120\u00c4\u0133i", - "\u0120packed", - "\u0120fiction", - "Now", - "\u0120Yay", - "\u0120pert", - "rons", - "unde", - "aches", - "\u0120styles", - "\u0120apr\u00c3\u00a8s", - "oku", - "\u0120Vice", - "\u00c4\u00b1n\u00c4\u00b1z", - "comm", - "\u0120assigned", - "\u0120interactions", - "\u0120acab", - "FELIPE", - "\u0120rescue", - "\u0120industries", - "\u0120Andy", - "\u0120praise", - "\u0120flame", - "\u0120snack", - "\u00ed\u0124", - "\u00e7\u0123", - "\u0120swo", - "render", - "\u0120boards", - "\u0120\u00d1\u0124\u00d0\u00be\u00d0\u00bc", - "enne", - "\u0120pasta", - "\u0120devil", - "\u0120Fel", - "\u0120hatte", - "\u0120colleg", - "eh", - "\u00ec\u00bb", - "\u00e3\u0123\u0135\u00e3\u0123\u00ae", - "\u0120productive", - "forward", - "\u00d0\u00b8\u00d0\u00bf", - "\u0120smartphone", - "\u0120invis", - "\u0120bum", - "\u0120whoa", - "\u00ec\u0140\u0126", - "\u0120ocks\u00c3\u00a5", - "\u0120Lang", - "\u0120Syria", - "\u0120sesi", - "\u00ce\u00af\u00ce\u00b1", - "\u0120approval", - "48", - "\u0120\u00d0\u00be\u00d0\u00b4\u00d0\u00b8\u00d0\u00bd", - "\u0120\u00eb\u0138", - "\u0120Harr", - "\u0120Administ", - "\u0120\u00d7\u00a4", - "\u0120Dean", - "fi", - "\u0120citizen", - "\u0120shark", - "05", - "\u0120boil", - "\u0120indicate", - "\u00e5\u00a1", - "Are", - "\u0120layout", - "\u0120refr", - "\u0120Pacific", - "AAAA", - "\u0120Australian", - "gression", - "Voice", - "\u00d0\u00b0\u00d0\u00bb\u00d1\u0123\u00d1\u0131", - "\u0120shelter", - "To", - "aupt", - "\u0120evaluation", - "apor", - "\u0120currency", - "\u0120\u00d0\u00bc\u00d0\u00bd\u00d0\u00be\u00d0\u00b3\u00d0\u00be", - "igos", - "\u00e3\u0123\u00b0", - "\u0120oct", - "\u0120royal", - "\u00e8\u00b3", - "asil", - "\u0120Children", - "\u0120rien", - "\u0120\u00eb\u0135\u013e\u00eb", - "\u0120barrier", - "\u0120ejemplo", - "\u0120ek", - "ND", - "esp", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00b0", - "\u0120pic", - "\u0120killer", - "\u0120integrate", - "\u0120fewer", - "\u0120disabilities", - "\u0120....", - "\u0120triangle", - "\u0120fees", - "\u0120widely", - "emi", - "\u0120overwhelming", - "\u0120zomb", - "\u0120bere", - "\u0120hood", - "\u0120Aye", - "\u0120Harvard", - "ev", - "\u0120\u00cf\u0126\u00ce\u00bf\u00cf\u0127", - "\u0120cups", - "\u0120Auch", - "zona", - "\u01201990", - "\u0120wei\u00c3\u0141", - "\u0120crunch", - "\u00e6\u00a5", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00b2", - "\u0120measuring", - "\u0120stations", - "\u0120Stephen", - "\u0120shortly", - "\u0120signing", - "\u0120comedy", - "omo", - "\u0120suggestions", - "\u0120signature", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00b2", - "\u0120disorder", - "aska", - "\u0120worlds", - "\u0120precisely", - "norm", - "rav", - "\u0120Civil", - "Inter", - "\u0120Certain", - "\u0120injured", - "\u0120suggests", - "\u0120Golden", - "\u0120cyber", - "\u0120\u00d8\u00b4", - "\u0120temporary", - 
"\u0120cooper", - "\u0120voted", - "\u0120ought", - "\u00e1\u00ba\u00a5y", - "xual", - "\u0120panels", - "\u012095", - "\u0120handsome", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "\u0120permit", - "\u0120kein", - "\u0120badly", - "\u0120notifications", - "iza", - "\u0120Notice", - "\u0120inclusive", - "\u0120answering", - "\u0120\u00ed\u0139", - "uld", - "\u00ed\u0127\u012e", - "\u0120nowadays", - "\u012037", - "\u0120bolt", - "\u0120static", - "\u0120Hop", - "\u0120avant", - "ajo", - "\u0120\u00eb\u00a7\u013d\u00ec\u0140\u012a", - "\u0120fifty", - "\u0120Final", - "\u0120scores", - "\u0120Tap", - "\u0120cyl", - "\u0120convince", - "\u0120anyways", - "oda", - "\u0120\u00ec\u0137\u00bc", - "\u0120serves", - "\u0120\u00d1\u0124\u00d0\u00b0\u00d0\u00ba\u00d0\u00be\u00d0\u00b9", - "\u0120Zoom", - "\u0120savings", - "ulo", - "\u0120southern", - "viewer", - "\u0120hoje", - "\u0120seja", - "\u0120representing", - "\u012a\u00eb\u012f\u013a", - "lik", - "\u0120Somebody", - "\u0120beast", - "\u0120sticking", - "\u0120insist", - "\u0120talented", - "\u0120explaining", - "\u0120attorney", - "\u00e9\u0125\u00a8", - "\u0120stairs", - "\u0120Dog", - "\u00ed\u012d", - "\u0120cig", - "\u0120shaped", - "\u0120sons", - "\u00cf\u0123\u00ce\u00b9", - "utt", - "\u0120\u00ec\u0136", - "\u0120parad", - "\u00ec\u013f\u00b8\u00eb\u012f\u00b0", - "\u0120horn", - "\u0120Jour", - "anno", - "\u0120worldwide", - "\u00e5\u012c\u013d", - "\u0120participation", - "\u00a6\u0126", - "\u0120m\u00c3\u00b3w", - "\u0120burned", - "\u0120writers", - "allah", - "\u0120Fund", - "\u0120clever", - "\u0120Leute", - "bin", - "\u0120beating", - "foot", - "\u0120\u00ec\u013d\u0132", - "\u0120Studio", - "\u0120vag", - "bey", - "rze", - "\u0120opposition", - "\u0120\u00d0\u00b6\u00d0\u00b8\u00d0\u00b7", - "who", - "\u0120\u00ea\u00b1\u00b4", - "\u0120trace", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00bd\u00d1\u012e", - "\u0120epid", - "\u0120gesch", - "\u0120Nar", - "\u0120BE", - "\u00d1\u0125\u00d0\u00b9", - "\u0120Sign", - "edly", - "\u0120clay", - "\u0120instantly", - "\u0120gathering", - "\u0120Galaxy", - "\u0120bored", - "\u0120Buddh", - "c\u00c3\u00a9", - "\u0120mam", - "\u0120slope", - "\u0120\u00eb\u012d\u00a4\u00ec\u013f\u012e", - "\u0120sch\u00c3\u00b6n", - "\u0120pir", - "gef", - "amer", - "\u0120h\u00c3\u00b6", - "\u0120colleague", - "\u0120presents", - "adium", - "\u0120\u00e0\u00ae\u00b5", - "\u0120falar", - "beep", - "\u0120dried", - "isms", - "\u0120rope", - "\u0120workshop", - "\u0120estud", - "\u0120bands", - "\u0120themes", - "\u00e5\u0127\u00ac", - "\u00d9\u012c\u00d8\u00b1", - "\u00e5\u0132\u0130", - "\u0120reminder", - "\u00d1\u0124\u00d1\u0125", - "\u0120Bh", - "\u0120coconut", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00be", - "\u0120Channel", - "\u0120immigration", - "\u00c3\u00a4s", - ".....", - "\u00e4\u00b8\u00bb", - "\u00e7\u013b\u00bd", - "stop", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d1\u0122", - "\u0120coins", - "\u0120\u00d1\u0129\u00d0\u00b0\u00d1\u0123", - "\u0120destruction", - "lined", - "\u0120barriers", - "antine", - "\u0120printed", - "\u0120congratulations", - "\u0120Heart", - "\u0120inqu", - "tha", - "\u0120hardly", - "\u0120Aven", - "\u0120tinha", - "\u0120Sony", - "\u0120NF", - "\u0120graduates", - "\u0120squeeze", - "eremy", - "\u00cf\u0126\u00ce\u00b9", - "\u0120epic", - "\u0120Ju", - "\u0120olm", - "\u0120Laughter", - "\u0120beliefs", - "\u0120Cru", - "\u0120True", - "\u0120Soul", - "oween", - "\u0120romantic", - "\u0120\u00d0\u00b7\u00d0\u00b2", - "\u0120anos", - "\u0120Yup", 
- "\u00e9\u013a\u00bf", - "dim", - "\u0120infer", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00bc", - "\u0120soc", - "uka", - "\u0120precise", - "\u0120dropping", - "\u0120clue", - "\u0120errors", - "charge", - "\u0120Pu", - "ometer", - "\u0120lambda", - "acional", - "\u0120Dong", - "\u0120chamber", - "\u0120thankful", - "\u0120Nu", - "\u0120Hawai", - "\u0120info", - "\u0120activate", - "\u0120Qual", - "\u0120qued", - "\u00d1\u0125\u00d0\u00bb\u00d1\u012e", - "\u0120cloth", - "\u00e5\u0138\u013e", - "\u0120wichtig", - "55", - "\u0120otra", - "ographer", - "\u0120curios", - "\u01201980", - "\u0120empres", - "dess", - "eur", - "\u0120cluster", - "arter", - "obile", - "\u0120Yan", - "\u0120Adv", - "\u0120discipline", - "\u0120\u00ec\u0142\u0137\u00eb\u0131\u0126", - "\u0120Place", - "\u0120Select", - "TE", - "\u0120\u00d0\u00b1\u00d1\u012d\u00d0\u00bb\u00d0\u00b0", - "\u0120whis", - "\u0120bay", - "\u0120Dor", - "encing", - "\u0120repet", - "\u0120ficar", - "pad", - "\u0120fog", - "uyor", - "\u0120snap", - "ibt", - "\u0120sobie", - "\u0120appointment", - "\u0120Ry", - "\u0120ceiling", - "ourse", - "\u0120writes", - "\u0120Afghanistan", - "\u0120mos", - "aze", - "\u0120penal", - "\u0120crystal", - "ICE", - "\u00ea\u00b0\u0132", - "\u00e9\u0141", - "\u0120Tesla", - "\u0120theories", - "\u0120appeal", - "\u0120newspaper", - "\u0120cookies", - "\u00e6\u00a9", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d9\u0126", - "\u0120maj", - "\u0120Getting", - "kommen", - "\u0120Heaven", - "ells", - "\u0120divine", - "\u00c4\u00ab", - "\u0120akt", - "\u0120hopes", - "\u0120Chen", - "wegen", - "***", - "\u0120Frage", - "\u0120\u00d0\u00bd\u00d0\u00b8", - "\u00e0\u00b8\u00b9", - "minister", - "nesota", - "which", - "\u0120explicit", - "\u0120verdad", - "\u0120graduated", - "\u0120Philipp", - "QL", - "\u0120MI", - "\u0120devot", - "\u0120cure", - "\u0120closest", - "\u0120\u00c3\u0126", - "\u0120sexy", - "\u00e3\u0123\u013d", - "\u0120Death", - "oko", - "ugu", - "\u0120Anne", - "itarian", - "esa", - "\u00d0\u00b5\u00d0\u00b3\u00d0\u00be\u00d0\u00b4", - "\u0120Dur", - "\u0120000", - "zeit", - "\u0120tournament", - "\u0120melhor", - "\u00e0\u00b8\u00aa", - "\u0120indu", - "\u0120flaw", - "\u0120wars", - "\u0120Mind", - "\u0120Iron", - "\u00d1\u0124\u00d0\u00b0\u00d0\u00ba", - "\u0120VR", - "\u0120siz", - "\u0120Southern", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0141\u00ac\u00eb", - "\u0120awak", - "\u0120\u00ec\u0137\u0140", - "\u0120cube", - "believable", - "ifall", - "dis", - "\u0120abandoned", - "mind", - "\u0120parl", - "\u0120classical", - "\u00e8\u012d", - "\u00e1\u00bb\u013bt", - "\u0120Auto", - "\u0120Bor", - "\u00e7\u00a9", - "400", - "\u0120Society", - "\u0120subtle", - "\u0120missions", - "\u0120remembered", - "\u0120Either", - "\u0120daf\u00c3\u00bcr", - "ORD", - "\u0120intensity", - "ESIN", - "\u0120Cup", - "\u0120rarely", - "\u0120toys", - "\u0120Charlie", - "\u00e1\u00bb\u0141", - "\u0120glaube", - "\u0120rounds", - "TIN", - "\u0120capability", - "\u0120derivative", - "\u0120referring", - "\u0120d\u00c3\u00a5", - "\u0120TALI", - "\u0120cotton", - "\u0120confer", - "\u0120columns", - "\u0120liberal", - "\u0120nunca", - "\u0120\u00ce\u00bc\u00ce\u00b5", - "\u0120indo", - "iben", - "\u0120Beispiel", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0142\u0129", - "\u0120\u00d1\u0125\u00d1\u0129", - "\u0120hoy", - "\u0120fry", - "\u0120Scottish", - "\u00e8\u012c", - "\u0120civ", - "\u0120conservative", - "\u0120airpl", - "\u0120sar", - "rus", - "\u0120investments", - "\u0120infinite", - "\u0120\u00e0\u00ae\u0137", - 
"\u0120TALIESIN", - "\u0120Gary", - "uell", - "\u0120\u00d0\u00b0\u00d0\u00ba", - "\u0120Cir", - "\u0120ritual", - "\u0120>>>", - "\u0120tempt", - "\u0120Tech", - "\u0120Pokemon", - "\u0120improvements", - "\u0120spare", - "\u0120translate", - "\u0120sonra", - "\u0120Film", - "wort", - "\u0120\u00d0\u00bc\u00d0\u00b8", - "\u0120periods", - "\u0120jealous", - "\u00e3\u0123\u0126\u00e3\u0123\u0126", - "\u0120tir", - "MI", - "\u0120conducted", - "\u0120\u00ec\u0137\u012a\u00eb\u0127\u0137", - "09", - "\u0120Polit", - "\u0120Whereas", - "\u0120moisture", - "\u0120sins", - "\u0120kap", - "\u0120\u00d1\u012f\u00d0\u00ba", - "\u0120benim", - "\u0120eliminate", - "\u0120athletes", - "\u0120Manager", - "\u0120featured", - "apore", - "\u00e4\u00ba\u013d", - "\u0120\u00eb\u00b0\u013e", - "\u0120perf", - "\u0120Thus", - "\u0120debut", - "\u00d0\u00be\u00d0\u00b1\u00d1\u0122", - "\u0120se\u00c3\u00b1", - "\u0120mysterious", - "words", - "\u0136\u00ea\u00b0\u0122", - "\u0120checks", - "\u0120volunteer", - "\u0120washing", - "\u0120Marvel", - "\u0120AB", - "issors", - "!'", - "\u0120Full", - "yeon", - "\u0120weigh", - "\u0120JOHN", - "\u0120vos", - "\u0120procedures", - "\u0120addressed", - "\u0120Berlin", - "puter", - "\u0120Ban", - "\u0120medication", - "\u0120drone", - "\u0120\u00d1\u0125\u00d0\u00b1", - "\u0120Jean", - "\u0120caps", - "\u0120disappointed", - "\u0120wore", - "\u0120\u00ea\u00b5\u0143", - "\u0120organize", - "\u0120Halloween", - "\u0120fantasy", - "yard", - "\u0120nosotros", - "\u0120jumped", - "\u0120photography", - "\u0120Name", - "rec", - "AB", - "\u0120blessing", - "\u0120Shut", - "\u0120bitter", - "pop", - "\u00e3\u0123\u013f\u00e3\u0124\u012e", - "\u0120dei", - "\u0120fulfill", - "\u00e7\u0132\u0128", - "\u0120dengan", - "\u0120belo", - "\u0120Meanwhile", - "\u0120depois", - "\u0120diabetes", - "\u0120bund", - "\u0120Zealand", - "\u0120digest", - "\u0120tires", - "\u0120dod", - "agne", - "\u00e1\u00ba\u00bft", - "\u0120peel", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00b1", - "\u0120nodes", - "\u0120trends", - "\u0120Switch", - "\u0120Award", - "\u0120Orig", - "\u0120Hal", - "\u0120estas", - "\u0120360", - "\u0120simult", - "\u0120comic", - "\u0120m\u00c3\u0142", - "\u0120balanced", - "\u0120Princess", - "\u0120kilometers", - "\u00e1\u00bb\u00a9", - "\u0120partir", - "\u00ec\u00a4\u0133", - "soft", - "\u0120View", - "\u0120biological", - "inst", - "44", - "\u0120manera", - "\u0120comprehensive", - "\u0120Sab", - "\u0120crimes", - "yers", - "\u0120Company", - "\u0120Phot", - "\u0120pouco", - "iac", - "\u0120beim", - "inate", - "\u0120subsequ", - "\u0120Mayor", - "\u0120centuries", - "\u00c3\u00a8res", - "\u00ec\u0140\u0138\u00ec\u0137\u0126\u00ec\u013c\u0136", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0141\u00bc", - "\u0120Frau", - "\u0120OH", - "\u0120\u00eb\u0123\u013f", - "\u0120Nah", - "\u0120Series", - "\u0120overnight", - "\u00ed\u0134\u012a", - "\u0120\u00e2\u0122\u00a2", - "\u0120trave", - "attered", - "\u0120warri", - "\u0120Grund", - "\u0120Indones", - "\u0120scra", - "oby", - "\u0120Brook", - "\u0120curs", - "\u0120\u00eb\u00b8", - "\u0120explains", - "ramatic", - "\u0120participating", - "\u0120minut", - "\u0120contracts", - "\u0120gegen", - "\u0120disappeared", - "\u0120SN", - "\u0120robust", - "aph", - "\u0120shrim", - "\u0120devast", - "cope", - "\u0120meets", - "\u0120peaceful", - "mate", - "\u0120weld", - "\u0120\u00d7\u00aa", - "don", - "\u00d1\u0125\u00d1\u0124\u00d1\u012e", - "\u0120registered", - "\u0120Nik", - "jin", - "\u0120cav", - "\u0120echt", - "iox", - 
"\u0120flowing", - "\u00d0\u00bd\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00b8", - "\u0120toe", - "\u0120entity", - "\u00d0\u00be\u00d0\u00b2\u00d0\u00b0", - "fits", - "\u0120Patrick", - "\u00d1\u0124\u00d1\u0122", - "\u0120leverage", - "\u0120correl", - "iah", - "\u0120strings", - "istinct", - "\u0120gue", - "archy", - "\u0120tengo", - "\u00c4\u00b1m\u00c4\u00b1z", - "\u0120orbit", - "\u00e4\u00b8\u00ba", - "\u0120\u00d0\u00b5\u00d1\u012b\u00d1\u0133", - "cake", - "\u0120\u00d7\u013e\u00d7\u0136", - "\u0120Minnesota", - "\u0120brake", - "owie", - "\u0120craw", - "\u00ea\u00b8\u00b0\u00eb\u00a5\u00bc", - "\u0120programme", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d1\u0125\u00d1\u0129", - "\u00e5\u0131\u00aa", - "iences", - "\u0120Oui", - "\u0120Pers", - "imiento", - "\u0120Invest", - "\u0120slower", - "\u00e6\u013b\u0124\u00e5\u0122\u013b", - "\u0120Beth", - "\u0120nurse", - "\u0120Spring", - "Sp", - "\u0120unemploy", - "\u00d0\u00b4\u00d0\u00b8", - "\u0120genius", - "\u0120Aaron", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0141\u00ac", - "\u0120ei", - "\u00e3\u0123\u0139\u00e3\u0124\u0129", - "\u0120tanks", - "\u0120aujourd", - "\u0120complexity", - "\u0120\u00d1\u0122\u00d0\u00b5\u00d1\u012a", - "\u0120oldest", - "\u0120letz", - "\u00e5\u0127\u00a5", - "\u0120phenomenon", - "print", - "\u0120Bundes", - "itat", - "\u00ea\u00bb\u013a", - "\u012042", - "\u0120Wi", - "\u0120incom", - "\u0120gek", - "\u0120embrace", - "\u0120ties", - "oute", - "\u0120dose", - "\u0120Friends", - "\u00d1\u012d\u00d1\u0124", - "\u00d0\u00b5\u00d0\u00b3\u00d0\u00be\u00d0\u00b4\u00d0\u00bd\u00d1\u0131", - "\u0120org", - "\u0126\u00eb\u00a1\u013e", - "\u00c3\u00b3g", - "\u0120exceed", - "\u0120gods", - "\u0120\u00ea\u00b1\u00b0\u00ec\u013a\u012a\u00ec\u013c\u0136", - "\u0120societ", - "\u0120Univers", - "it\u00c3\u00a4t", - "\u0120worden", - "\u0120smoking", - "\u0120intens", - "abul", - "emia", - "\u00e8\u0133", - "47", - "fly", - "\u01202006", - "\u0120Seriously", - "\u0120przez", - "\u00e6\u00bc", - "cre", - "\u0120nan", - "\u0120modes", - "\u00d0\u00be\u00d0\u00b2\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "\u0120Hang", - "emen", - "\u0120beneficial", - "\u0120voters", - "\u0120Broad", - "\u0120bent", - "Wow", - "\u0120mul", - "\u00e5\u0135\u00a5", - "\u0120UC", - "\u0120damaged", - "\u0120Ukraine", - "\u0120wipe", - "\u0120stones", - "\u0120managers", - "\u0120rab", - "\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00be", - "lat", - "\u0120dece", - "\u0120graphic", - "\u0120foss", - "\u0120disagree", - "\u0120Amen", - "\u0120secrets", - "hole", - "inkle", - "\u0120fortunate", - "\u0120\u00ec\u00b1", - "\u00ec\u013e\u0126", - "\u00e8\u0132\u00ac", - "\u0120habits", - "\u0120buried", - "\u0120hin", - "\u0120virtually", - "olas", - "\u0120RP", - "\u0120Tab", - "low", - "\u0120sacrific", - "\u0120estimated", - "oln", - "\u00d9\u012d", - "cur", - "\u0120Feel", - "\u0120castle", - "\u0120useless", - "\u0120disg", - "\u0120Jacob", - "\u0120gaan", - "\u0120upside", - "\u0120parece", - "\u00e3\u0125\u00b3\u00e3\u0125", - "\u0120shipping", - "\u0120CR", - "\u0120disrupt", - "acter", - "UND", - "fu", - "\u00e5\u00ae\u012e", - "\u0120Pick", - "\u0120Charl", - "\u0120Bull", - "\u0120enterprise", - "\u0120punishment", - "acking", - "\u0120fraction", - "\u0120tablet", - "\u0120chord", - "\u0120similarly", - "\u00e5\u0127\u00b6\u00e5\u00af\u00a6", - "\u0120Toronto", - "\u0120courts", - "\u00c4\u0141l", - "eszcze", - "\u0120pronoun", - "\u0120Sister", - "\u0120MP", - "\u0120greatly", - "\u0120Dank", - "icop", - "\u0120garbage", - 
"\u0120resolve", - "\u0120Saf", - "\u0120Gun", - "\u0120compound", - "\u0120\u00eb\u00b0\u00b0", - "\u0120Musik", - "\u00e2\u013b\u00ab", - "\u0120chaos", - "\u0120Whenever", - "\u0120euros", - "\u0120orchest", - "\u0120refriger", - "alan", - "\u00e0\u00b8\u00b7", - "\u0120Amazing", - "\u0120pud", - "agan", - "\u0120jeszcze", - "isy", - "\u0120accuracy", - "\u0120Ama", - "isode", - "\u00eb\u012e\u0122", - "\u0120interpretation", - "\u0120Liber", - "\u00e6\u00b7", - "cam", - "\u0120evolved", - "\u0120Kay", - "\u00d1\u0128\u00d1\u012d", - "\u0120creator", - "itas", - "\u0120alarm", - "\u0120celebration", - "zent", - "\u0120funcion", - "\u0120ov", - "umbling", - "\u0120%", - "\u00e0\u00b8\u012a", - "\u0120restrictions", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b2", - "\u0120Kinder", - "\u0120banana", - "\u00d1\u012e\u00d1\u0131", - "\u0120diameter", - "\u0120northern", - "urers", - "\u0120Pas", - "\u00e6\u012a\u0133\u00e7\u013c\u0126", - "\u0120workforce", - "\u0120jung", - "\u0120guarante", - "\u0120equilib", - "\u0120suite", - "\u0120euro", - "\u0120deliber", - "Ste", - "\u0120downtown", - "\u0120chin", - "\u0120codes", - "edia", - "\u0120sheep", - "reshold", - "wnie", - "\u00c3\u00b3b", - "\u0120underlying", - "lia", - "jer", - "\u00cf\u0122\u00cf\u012e", - "\u00e7\u013f", - "throp", - "\u0120zap", - "\u0120vacuum", - "\u0120Hab", - "\u0120wrapped", - "\u00ec\u00a2", - "\u0120inventory", - "\u00d0\u00bc\u00d0\u00b0", - "\u0120coord", - "\u0120plates", - "\u0120symm", - "Te", - "\u0120w\u00c5\u0124a\u00c5\u013dnie", - "\u0120reaches", - "\u0120lonely", - "Script", - "lee", - "esser", - "\u0120\u00ea\u00b1\u00b8", - "\u0120Gesch", - "\u0120Moving", - "\u0120r\u00c3\u00a9p", - "\u0120Vill", - "\u00e5\u0132\u012a", - "\u0120Rachel", - "\u0120temos", - "ONE", - "\u0120strain", - "\u0120angel", - "\u0120f\u00c3\u00a5", - "Tr", - "\u0120acho", - "\u0120highlights", - "\u0120Wer", - "\u0120Carl", - "\u0120blur", - "\u0120regards", - "\u00c2\u00b7", - "\u00d0\u00b8\u00d0\u00bb\u00d1\u0123\u00d1\u0131", - "\u0120recre", - "\u0120Yani", - "UCK", - "\u0142\u00b8", - "\u0120electrons", - "\u0120Spiel", - "\u0120ved", - "\u00da\u00be", - "\u0120beam", - "\u0120idiot", - "\u00eb\u0135\u00a4", - "\u00d0\u00bd\u00d0\u00b0\u00d1\u0129", - "idd", - "\u0120ski", - "itative", - "\u0120hypothes", - "\u00e3\u0123\u00a7\u00e3\u0123\u013b\u00e3\u0123\u0143", - "enter", - "\u0120\u00ec\u0137\u0126\u00eb\u012d\u012a\u00eb", - "\u0120ihre", - "\u0120preview", - "angel", - "\u0120demon", - "\u0120dus", - "\u0120dic", - "\u0120Kom", - "LEY", - "...!", - "\u0120sieht", - "\u0120Sonic", - "\u0120tenho", - "anas", - "\u0120digit", - "\u0120Maar", - "\u0120undergrad", - "ouncer", - "uffy", - "\u0120conversion", - "\u0120disconnect", - "\u0120echo", - "omer", - "\u0120curriculum", - "\u0120perch\u00c3\u00a9", - "\u0120wand", - "..?", - "\u0120rolled", - "\u0120entrepreneur", - "\u0120theoret", - "\u0120\u00d1\u012b\u00d0\u00be", - "\u0120insights", - "\u0120zusammen", - "oin", - "rett", - "produ", - "\u0120visitors", - "eous", - "\u0120grandmother", - "\u0120humor", - "\u0120\u00d0\u00bd\u00d0\u00b8\u00d1\u0127", - "zenia", - "inson", - "\u0120reset", - "\u0120baseball", - "\u0120matching", - "\u00eb\u012d\u00a4\u00ea\u00b0\u0122", - "\u0120punto", - "\u00ec\u00a1", - "\u0120rede", - "\u0120addressing", - "\u0120forecast", - "\u0120Bol", - "\u0120colored", - "\u0120documentation", - "\u0120expectation", - "\u0120Northern", - "\u0120creo", - "\u0120\u00e0\u00ae\u013c", - "fon", - "\u0120unsere", - "UM", - "\u0120copies", - 
"\u0120expanded", - "\u0120veterans", - "\u0120Alm", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00be\u00d0\u00b1\u00d1\u012b\u00d0\u00b5", - "\u0120psychological", - "\u0120nosso", - "\u0120payments", - "imeters", - "\u0120-->", - "\u0120Jennifer", - "\u0120volunteers", - "osse", - "orious", - "\u0120\u00d0\u00b1\u00d1\u012d\u00d0\u00bb\u00d0\u00b8", - "\u00e8\u0124", - "\u0120Ess", - "ws", - "\u0120BC", - "\u0120IC", - "Woman", - "\u0120vont", - "\u0120ethnic", - "ENN", - "\u00d0\u00b8\u00d0\u00bc\u00d0\u00be", - "\u0120lob", - "\u0120oui", - "cs", - "\u0120rehe", - "\u0120\u00ec\u0142\u0123", - "\u0120chick", - "\u00c3\u00basica", - "\u0120kont", - "\u0120District", - "\u0120pile", - "\u0120\u00d0\u00b0\u00d0\u00b2", - "\u00d0\u00b5\u00d0\u00b9\u00d1\u0123\u00d1\u0124\u00d0\u00b2", - "\u0120\u00c2\u00a3", - "\u0120issued", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bc\u00d0\u00bf", - "\u0120prosper", - "\u0120profound", - "\u0120Dear", - "\u0120\u00e3\u0123\u0135", - "\u0120funded", - "\u0120bisa", - "\u0140\u013a\u00eb", - "\u00d7\u0141", - "\u0120\u00ec\u013f\u013a", - "\u0120twelve", - "\u0120Champions", - "\u00e9\u013f\u0140\u00e5\u00b8\u00b8", - "\u00d1\u0123\u00d0\u00bb", - "\u01202005", - "pm", - "\u0120onde", - "\u0120diff\u00c3\u00a9", - "\u0120Chall", - "\u0120difficulties", - "\u0120garage", - "\u0120d\u00c3\u00a1", - "\u00c3\u00bcnk", - "\u0120\u00eb\u00ac\u00bc", - "\u0120tran", - "\u0120submitted", - "zw", - "\u00d9\u012a\u00d8\u00a7", - "\u0120ark", - "\u0120\u00ec\u0126\u00b1", - "\u0120grocery", - "\u00d0\u00be\u00d0\u00bd\u00d0\u00b0", - "iere", - "\u0120aest", - "\u0120exhibition", - "\u0120r\u00c3\u00a9s", - "\u0120consistency", - "\u0120cookie", - "\u00d0\u00bd\u00d0\u00b5\u00d0\u00b9", - "\u0120replacement", - "\u00e6\u00b2\u00b9", - "\u0120Sem", - "\u0120\u00ec\u0124\u00ac\u00ec\u013c\u00a9", - "800", - "\u0120genes", - "\u0120transaction", - "\u0120EL", - "\u0120durante", - "ibles", - "\u0120Eat", - "tail", - "issance", - "\u0120toss", - "\u0120survived", - "\u0120offices", - "\u0120supportive", - "Where", - "\u0120toutes", - "\u0120\u00eb\u00a7\u012b", - "\u0120jokes", - "ieron", - "apers", - "\u0120mature", - "\u0120Marsh", - "\u0120sido", - "kind", - "\u0120realmente", - "\u0120Chef", - "\u0120quelque", - "\u0120judges", - "eft", - "ERS", - "\u0120jet", - "\u0120persons", - "\u00e8\u00bb", - "izations", - "rik", - "\u0120shops", - "\u0120Wy", - "\u0120eleg", - "qu\u00c3\u00a8", - "quoi", - "\u0120juga", - "\u0120\u00ed\u0137\u013e\u00eb\u00b2\u012a", - "\u0120Question", - "\u0120Global", - "\u0120\u00ec\u0137\u00bd\u00ea\u00b0\u0126", - "\u0120Station", - "\u00e6\u0130\u00a5", - "\u0120Ohio", - "\u0120sticky", - "\u0120stressed", - "\u0120g\u00c3\u00bcn", - "\u0120\u00ed\u013f", - "\u00d1\u0123\u00d1\u0124\u00d1\u0125\u00d0\u00bf", - "\u00e9\u00a1\u012e", - "\u0120PhD", - "immer", - "\u0120mentor", - "\u0120invented", - "\u0120reun", - "\u0120inevit", - "\u0120pol\u00c3\u0143t", - "\u0120execute", - "\u0120Story", - "\u0120outstanding", - "\u0120guer", - "\u0120Rain", - "\u0120choses", - "\u0120Tit", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d1\u0122", - "\u0120Singapore", - "\u0120None", - "\u0120chronic", - "\u00b0\u00eb\u012f\u00b0", - "\u0120ego", - "\u00e6\u0142\u00b7", - "EST", - "\u00e3\u0123\u0124\u00e3\u0124\u012c", - "\u0120Wang", - "\u0120NAT", - "\u0120aug", - "\u0120desktop", - "\u0120eternal", - "\u0120\u00ec\u0124\u00ac\u00ec\u012d\u00a4", - "\u0120Constitution", - "\u00ec\u0124\u00ac\u00eb", - "\u00d7\u013b\u00d7\u013e", - "pres", - 
"\u0120\u00d0\u00a2\u00d1\u012d", - "\u0120interf", - "\u0120lists", - "\u0120fights", - "ften", - "\u0120Iowa", - "\u0120motivated", - "\u0120Hosp", - "\u0120elsewhere", - "\u0120paths", - "\u0120instances", - "Bl", - "range", - "\u00e1\u00bb\u00b1", - "\u0120Sit", - "mana", - "\u0120\u00ec\u012d\u013e\u00ec\u0140\u0133", - "\u0120m\u00c3\u00acnh", - "ansas", - "\u0120sna", - "\u0120philosoph", - "\u0120passe", - "\u00c6\u00b0\u00e1\u00bb\u013fi", - "akh", - "ental", - "\u0120ihn", - "ructor", - "\u0120\u00d0\u00b2\u00d0\u00b0\u00d1\u012a", - "\u0120generous", - "\u0120pivot", - "\u00d0\u00bf\u00d0\u00be\u00d0\u00bb", - "\u0120jamais", - "\u0120coment", - "\u0120Lew", - "odzi", - "\u0120Xbox", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00b4", - "\u0120consent", - "\u012b\u00ec\u0140\u00a5", - "\u0120dispar", - "lass", - "\u0120Governor", - "Beifall", - "\u0120\u00ea\u00b0\u013e", - "\u0120beloved", - "\u00d7\u0142\u00d7\u0137", - "sell", - "\u0120honored", - "leh", - "\u0120w\u00c3\u00a4re", - "unting", - "\u0120fraud", - "\u0120RAM", - "\u00ea\u00b1\u00b8", - "\u0120kills", - "\u0120economics", - "04", - "\u00d0\u00bf\u00d0\u00b5\u00d1\u0122", - "\u0120coisas", - "\u0120\u00d0\u00b8\u00d0\u00b3\u00d1\u0122", - "\u00c3\u0143m", - "\u0120m\u00c3\u00b6chte", - "\u0120\u00ec\u00b5\u013e", - "\u0120stimul", - "\u0120fastest", - "lv", - "\u0120g\u00c3\u00a9n", - "\u0120Sounds", - "\u01201970", - "\u0120homework", - "speaking", - "\u0120encouraging", - "\u0120query", - "\u0120revers", - "profit", - "\u0120dy", - "\u0120\u00ec\u0140\u0133", - "\u00eb\u012c\u0136\u00eb\u012f\u00b0\u00ec\u013c\u0136", - "\u0120soap", - "\u0120Gall", - "\u0120CN", - "\u0120Ans", - "\u0120fic", - "anks", - "\u0120dessert", - "\u0120\u00ec\u0142\u0122\u00ed\u013f\u00ac", - "\u0120Making", - "\u0120come\u00c3\u00a7", - "\u00ea\u00b3\u0126", - "\u0120association", - "Dad", - "hee", - "\u0120hogy", - "\u0120apro", - "\u0120invisible", - "American", - "\u00ed\u0130", - "\u0120vibe", - "\u0120emissions", - "\u0120advocate", - "\u0120kicked", - "\u0120vel", - "\u0120summar", - "\u0120freaking", - "chron", - "\u0120pinch", - "\u0120wszystk", - "iscal", - "\u0120proved", - "\u0120mindful", - "\u0120t\u00c3\u00a4", - "\u0120noises", - "\u0120isolated", - "\u0120crossed", - "\u0120\u00ea\u00b0\u0137", - "\u0120voil\u00c3\u0142", - "\u0120chore", - "\u0120RA", - "Com", - "\u0120relaxed", - "atro", - "\u0120prevention", - "Voiceover", - "OD", - "\u0120Covid", - "\u0120separation", - "\u0120-[", - "\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d0\u00b3\u00d0\u00be", - "\u00e7\u013b\u00bc", - "\u0120SD", - "bleep", - "\u0120independence", - "\u0120partial", - "\u0120algorithms", - "\u0120Anyone", - "\u0120associate", - "hum", - "icular", - "\u0120b\u00e1\u00ba\u00a1n", - "\u0120battles", - "Good", - "Applause", - "\u0120bastante", - "\u0120advant", - "\u0120Sweet", - "\u0120refused", - "\u00e3\u0124\u00b8", - "\u0120\u00d1\u0124\u00d0\u00b5\u00d0\u00b1\u00d0\u00b5", - "plet", - "\u0120encouraged", - "\u00e5\u0135\u00a6", - "\u0120miracle", - "\u0120Bun", - "\u0120Var", - "rimination", - "elect", - "\u0120Mult", - "\u0120delivering", - "eing", - "\u0120cm", - "nehmen", - "\u0120Line", - "\u0120\u00eb\u00a7\u012e", - "enced", - "\u0120Sound", - "\u0120Contin", - "ijd", - "UNG", - "kle", - "\u0120threshold", - "\u0120compact", - "adt", - "\u0120toes", - "\u0120Pur", - "owned", - "mented", - "\u0120designing", - "\u0120vaccinated", - "\u0120exhaust", - "\u0120basics", - "\u0120consists", - "\u0120Guy", - "aczy", - "\u0120m\u00c3\u0143", - 
"won", - "\u00e5\u00ae\u00b3", - "\u012085", - "\u00e6\u0124", - "\u0120mum", - "\u0120ignor", - "\u0120printing", - "acular", - "pow", - "\u0120expanding", - "\u0120gir", - "\u0120Cab", - "\u00ed\u013a\u00b8", - "\u00d1\u0124\u00d1\u012e\u00d1\u0123\u00d1\u0131", - "\u0120\u00ec\u0139\u00ac\u00eb\u0141\u00ac\u00eb\u00b6\u0126", - "\u0120angles", - "\u0120terminal", - "\u0120Won", - "\u0120Interesting", - "\u0120crossing", - "\u0120bonds", - "\u0120pueden", - "\u0120orb", - "lar\u00c4\u00b1n", - "\u0120creepy", - "\u0120nutrition", - "\u0120allies", - "\u0120wireless", - "\u0120desired", - "\u0120compute", - "\u0120Arizona", - "\u0120Beautiful", - "\u0120produces", - "\u0120nuestro", - "ted", - "\u0120eligible", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b7", - "icial", - "\u0120Hero", - "\u0120consume", - "\u0120robots", - "\u0120purchased", - "cci\u00c3\u00b3n", - "\u0120iz", - "\u00c6\u00b0\u00e1\u00bb\u00a3c", - "\u00ce\u00af\u00ce\u00bd\u00ce\u00b1\u00ce\u00b9", - "\u0120\u00d8\u00a3\u00d9\u0128", - "\u0120shadows", - "\u0120Media", - "\u0120princess", - "\u0120klar", - "\u0120wooden", - "\u0120usar", - "\u0120g\u00c3\u00bczel", - "\u0120slot", - "rade", - "\u0120\u00eb\u0134", - "\u0120harmon", - "\u0120ingredient", - "orship", - "eki", - "\u0120grandfather", - "\u0120excitement", - "\u0120politicians", - "..!", - "\u0120outs", - "\u0120separately", - "\u0120\u00d1\u0131\u00d0\u00ba", - "\u0120Welt", - "\u0120Pow", - "jan", - "\u0120orientation", - "\u00e5\u0131\u012d", - "LC", - "agem", - "\u00db\u012e\u00da\u00ba", - "\u00e5\u0132\u0139", - "\u0120branches", - "aden", - "rente", - "\u0120Ihr", - "asm", - "\u0120est\u00c3\u00a3o", - "\u0120Nic", - "\u0120slave", - "\u0120compress", - "crowd", - "\u0120climbing", - "\u0120Management", - "\u0120Bah", - "\u0120panic", - "\u0120kor", - "\u0120cooling", - "\u0120bind", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00b4", - "\u0120rack", - "\u0120entit", - "\u0120sends", - "\u0120yourselves", - "des", - "\u0120Muslims", - "\u0120\u00ed\u013c", - "isma", - "cycle", - "unkt", - "\u0120Core", - "\u0120injuries", - "\u0120identical", - "\u00d0\u00ba\u00d0\u00b0\u00d1\u0131", - "\u0120Deutschland", - "\u0120\u00d0\u00b5\u00d0\u00b5", - "isan", - "\u0120truc", - "leton", - "\u0120backup", - "\u0120ultra", - "\u0120abund", - "illeurs", - "\u0120by\u00c5\u0124o", - "\u00e5\u0127\u0125", - "orted", - "\u0120earthqu", - "\u0120\u00d0\u00ba\u00d0\u00bb", - "\u0120observation", - "\u0120maintenant", - "elen", - "\u0120settled", - "\u0120pela", - "\u0120Econom", - "\u0120\u00d5", - "\u0120steering", - "\u0120ALL", - "\u0120Cher", - "\u0120patience", - "\u0120Snow", - "\u0120bor", - "\u0120worthy", - "\u0120c\u00c3\u00a1i", - "\u0120\u00d7\u00a7", - "\u0120\u00ce\u00ba\u00ce\u00b1", - "dog", - "\u0120Karen", - "illes", - "\u00ce\u00b2", - "\u0120agriculture", - "\u00d7\u0137\u00d7\u0141", - "\u0120Sean", - "\u0120sensors", - "\u00ed\u0137\u00b4\u00eb", - "agh", - "\u0120publicly", - "\u0120peux", - "\u0120Alexander", - "\u0120priorit", - "\u0120lazy", - "ardon", - "attering", - "\u0120costume", - "\u00d8\u00b3\u00d8\u00aa", - "\u00e8\u00bf\u013a", - "\u0120unw", - "\u00d0\u013d", - "\u0120thickness", - "quito", - "gunt", - "istas", - "neys", - "\u0120\u00eb\u0132\u013a\u00ea\u00b2\u012e", - "\u0120Brasil", - "\u0120token", - "\u0120affili", - "lon", - "\u0120f\u00c3\u00a5r", - "\u0120Beach", - "\u0120witch", - "\u0120Seven", - "\u0120pant", - "\u00ce\u00bb\u00ce\u00bb", - "\u0120captain", - "\u00e5\u013f", - "\u0120veut", - "\u0120pouvoir", - "acz", - 
"\u0120Barb", - "\u0120utility", - "\u0120contemporary", - "\u0120obtained", - "\u0120paintings", - "ear", - "\u0120pean", - "\u0120Og", - "\u0120cust", - "\u00d0\u00bb\u00d0\u00b5\u00d0\u00bc", - "\u0124\u013a\u00eb", - "\u0120Isso", - "\u0120aconte", - "\u0120Tele", - "\u0120Assistant", - "\u00c3\u012b", - "\u00ed\u0138\u012a\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120counts", - "\u0120buck", - "\u0120Deep", - "\u0120tackle", - "\u0120harsh", - "\u0120decides", - "\u00e9\u0139\u013e", - ".\u00e2\u0122\u012d", - "\u00e9\u0124\u012c", - "\u0120Angel", - "\u0120laying", - "\u0120calories", - "\u0120controlling", - "\u0120advantages", - "\u0120\u00d1\u012f\u00d1\u0124\u00d0\u00be\u00d0\u00b9", - "\u0120approaching", - "\u0120threats", - "akan", - "ematic", - "mann", - "\u00ea\u00b3\u00b5", - "mumbles", - "aci\u00c3\u00b3", - "\u0120maintaining", - "\u0120founder", - "lah", - "fight", - "\u0120admitted", - "\u00e2\u0122\u00a6.", - "\u0137\u012e", - "abol", - "\u0120usage", - "\u0120nonsense", - "\u0120Palest", - "\u0120contre", - "\u0120Democratic", - "\u0120ER", - "jekt", - "\u0120arbit", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d0\u00bb", - "\u0120Michelle", - "icher", - "esh", - "\u0120Pho", - "\u00d0\u00ba\u00d0\u00be\u00d0\u00bc", - "49", - "\u0120Energy", - "\u00ce\u00bf\u00cf\u012f", - "\u0120cents", - "\u0120refers", - "\u0120gospel", - "\u0120Sha", - "\u0120Share", - "\u00d7\u013b\u00d7\u0142", - "\u0120clinic", - "\u0120\u00eb\u0126\u00a3", - "\u0120equality", - "ugs", - "\u0120shed", - "\u0120planes", - "\u0120toute", - "reck", - "\u0120strand", - "\u0120biology", - "\u0120league", - "\u0120Pok", - "\u0120n\u00c3\u00bamero", - "\u0120Coast", - "\u0120consistently", - "\u0120nucle", - "OOOO", - "\u0120objet", - "\u0120chor", - "\u0120ginger", - "\u0120dabei", - "\u0120cooperation", - "\u00e0\u00af\u012f.", - "nten", - "\u00e7\u00a4", - "l\u00c3\u0142", - "\u00ec\u0138\u0133", - "rado", - "\u0120passive", - "\u0120gloves", - "\u0120underground", - "\u0120logical", - "\u0120ket", - "\u0120functionality", - "\u00b8\u00eb\u00a6\u00ac", - "\u0120portal", - "eller", - "\u00d7\u013b\u00d7\u00a8", - "\u0120Ted", - "\u0120Gre", - "\u0132\u013e", - "\u0120personnel", - "\u0120emerging", - "\u0120F\u00c3\u00bcr", - "\u0120meantime", - "usalem", - "\u0120Clear", - "\u0120trapped", - "\u0120\u00ec\u013c\u00b0", - "\u0120displ", - "\u0120mettre", - "\u0120municip", - "\u0120withdraw", - "\u0120spat", - "unes", - "\u0120accessibility", - "\u00e6\u012a\u0133\u00e4\u00bb\u00ac", - "\u0120apare", - "\u0120prospect", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b7", - "\u0120copper", - "\u0120PRO", - "\u00cf\u0127\u00cf\u0126", - "\u0120attacking", - "\u0120Vin", - "\u0120Stone", - "\u0120investigate", - "style", - "\u0120\u00ce\u00bb", - "\u00eb\u00a1\u013f", - "\u00eb\u00a7\u012a", - "\u0120inspect", - "\u0120liver", - "\u00d0\u00b0\u00d0\u00bb\u00d0\u00b8\u00d1\u0123\u00d1\u012e", - "\u0120sera", - "halten", - "eman", - "\u0120ministry", - "''", - "\u0120dots", - "\u00e3\u0127\u012d\u00e3\u0127\u012d\u00e3\u0127\u012d\u00e3\u0127\u012d", - "\u00d1\u0125\u00d1\u0123\u00d1\u0124", - "\u0120Jak", - "AKE", - "\u0120gaps", - "ucker", - "\u0120\u00d0\u00b8\u00d0\u00bd\u00d1\u0124\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d1\u0123", - "\u0120Emily", - "\u0120interval", - "\u0120tender", - "\u0120Technology", - "game", - "\u0120trib", - "\u00d9\u0126\u00d8\u00a7", - "\u0120Development", - "\u00d9\u0127\u00d8\u00a7", - "\u0120wrist", - "\u0120fires", - "\u0120targeted", - "\u00ec\u0142\u0132", 
- "\u0120sod", - "\u00ed\u013c\u012e", - "\u0120oldu\u00c4\u0141", - "\u0120seasons", - "ventions", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00b3\u00d0\u00be", - "\u0120sometime", - "\u00d0\u00bb\u00d0\u00b8\u00d0\u00b2", - "n\u00c3\u00a9", - "\u0120t\u00c3\u00ba", - "\u0120Deus", - "\u0120execution", - "\u00c3\u00a1p", - "\u0120Change", - "\u0120Indeed", - "\u0120regulation", - "\u0120Hung", - "\u00c3\u00a9is", - "\u0120wishes", - "\u0120jazz", - "\u0120structural", - "\u0120blowing", - "\u0120by\u00c4\u0129", - "\u0120thermal", - "phant", - "\u00d1\u0122\u00d1\u0125\u00d0\u00b7", - "\u00d0\u00b0\u00d0\u00bd\u00d1\u0124", - "\u0120Pull", - "\u0120confusion", - "\u00d0\u00bd\u00d1\u012d\u00d0\u00bc\u00d0\u00b8", - "\u0120scenarios", - "\u00ec\u0142\u0123\u00ec\u013e\u00bc\u00eb\u00a1\u013e", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d1\u0124", - "\u0120tattoo", - "\u0120autre", - "\u0120heating", - "\u0120treating", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bd\u00d0\u00b8\u00d0\u00bc", - "\u0120exclus", - "\u0120LOL", - "wear", - "agle", - "\u0120zur\u00c3\u00bcck", - "\u0120rational", - "su", - "\u0120deter", - "\u0120Native", - "\u00e0\u00ae\u0137\u00e0\u00ae\u00b3", - "ached", - "\u0120\u00e3\u0125", - "\u0120Entonces", - "\u0120hora", - "\u00ec\u013f\u00b4\u00ec\u0139\u0132\u00ec\u013c\u0136", - "\u0120lite", - "\u00c3\u00ab", - "\u0120sixth", - "\u0120\u00d0\u00b1\u00d0\u00be\u00d0\u00bb\u00d0\u00b5\u00d0\u00b5", - "actor", - "\u0120psychology", - "\u00e7\u013d\u00b8", - "\u0120demands", - "\u0120peer", - "\u0120newly", - "\u0120WWE", - "Donald", - "\u0120Box", - "\u0120pine", - "\u0120loading", - "\u0120Nico", - "\u0120s\u00c5\u0124", - "omme", - "ART", - "\u0120recruit", - "\u0120bugs", - "arents", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b1", - "\u0120Inside", - "ipper", - "dramatic", - "\u0120planets", - "orde", - "\u0120yoga", - "child", - "\u0120Marie", - "\u0120\u00e3\u0123\u0124", - "\u0120BL", - "\u0120filmed", - "\u0120refresh", - "\u0120tomatoes", - "\u0120fet", - "Qu\u00c3\u00a9", - "\u0120!!", - "\u0120\u00eb\u0124\u00b4\u00eb", - "rine", - "\u0120interactive", - "sal", - "annah", - "pez", - "\u00e7\u00b6\u0135", - "\u0120understands", - "\u0120Tokyo", - "\u0120libraries", - "\u0120reader", - "\u0133\u0132", - "oz", - "\u0120Ende", - "\u0120Flo", - "\u0120mild", - "\u0120poetry", - "\u0120\u00d0\u00b6\u00d0\u00b8\u00d0\u00b2", - "\u00e6\u0126\u013d", - "\u0120behave", - "\u0120doen", - "\u0120Susan", - "page", - "raham", - "\u0120communications", - "\u0120tuning", - "\u0120pac", - "\u0120anxious", - "IO", - "Mark", - "\u0120hi\u00c3\u00a7", - "books", - "\u0120piss", - "\u0120enabled", - "achelor", - "\u0120FOR", - "\u0120\u00c3\u00a9c", - "\u0120TR", - "ilst", - "hat", - "\u0120\u00ec\u013f\u012e", - "\u0120tych", - "\u0120jar", - "\u0120builds", - "\u0120Argent", - "\u0120intermedi", - "\u0120lou", - "\u0120ara", - "\u0120assignment", - "\u0120cabinet", - "\u0120retirement", - "\u00e3\u0123\u00bb", - "\u0120disabled", - "rica", - "\u0120awards", - "\u0120boots", - "\u0120acknowled", - "\u0120thy", - "\u0120\u00ea\u00b5\u00ac", - "\u0120synd", - "\u00d0\u00bd\u00d0\u00b8\u00d0\u00b9", - "ilton", - "\u0120probl", - "\u0120Fal", - "\u0120verdade", - "\u0120700", - "\u0120Learning", - "ocus", - "\u0120palace", - "Not", - "tain", - "cm", - "\u0120magnet", - "incoln", - "\u0120figuring", - "\u0120Lyn", - "\u0120Boss", - "\u0120VO", - "\u0120diagnosis", - "\u0120equipped", - "watch", - "inos", - "aders", - "\u0120shelf", - "\u0120organis", - "\u0120nod", - 
"\u0120k\u00c4\u00b1z", - "ppers", - "\u0120restore", - "\u0120artic", - "\u0120Voice", - "\u00c4\u00b1yorum", - "\u00ea\u00b2\u00a9", - "\u0120spreading", - "\u0120hips", - "\u0120ward", - "ureau", - "\u0120intersection", - "66", - "\u012039", - "\u00e7\u00b3", - "\u0120waited", - "\u00ec\u00b4", - "hhhh", - "\u0120dys", - "\u0120EN", - "\u0120batch", - "\u0120caf", - "\u0120marker", - "\u00e5\u00a4\u00a7\u00e5\u00ae\u00b6\u00e5\u00a5\u00bd", - "orable", - "\u00c3\u00b3ria", - "\u0120stepped", - "\u0120celebrating", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00b0", - "\u0120worn", - "\u0120Fol", - "\u0120pla", - "\u0120attempts", - "\u0120tweet", - "\u0120rust", - "gence", - "\u00ed\u0128\u00b5", - "\u0120revel", - "\u0120recept", - "eness", - "\u0120((", - "\u00e3\u0125\u00bc\u00e3\u0125", - "!\u00e2\u0122\u012d", - "\u0120\u00ec\u0128\u0132", - "\u0120influenced", - "\u00d0\u00b8\u00d0\u00b6", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bd\u00d0\u00b5\u00d1\u0129\u00d0\u00bd\u00d0\u00be", - "\u0120colleges", - "ioni", - "\u0120sag", - "Ann", - "olar", - "\u0120expressions", - "\u0120suits", - "\u0120ownership", - "eland", - "piece", - "\u00e6\u0122\u0130\u00e4\u00b9\u012a", - "\u0120despu\u00c3\u00a9s", - "\u0120tel", - "\u0120insult", - "\u0120\u00ea\u00b5\u012b\u00ec\u0140\u00a5", - "\u0120Small", - "\u0120FR", - "oka", - "berries", - "\u0120Anton", - "\u00d0\u00b5\u00d0\u00bb\u00d1\u0131", - "\u00d1\u0131\u00d1\u0123", - "\u0120valve", - "acts", - "\u0120woods", - "\u00e0\u00ae\u00a3", - "\u0120cultiv", - "\u0120f\u00c3\u00a1", - "\u00e3\u0123\u00a8\u00e3\u0123\u0126\u00e3\u0123\u0128", - "\u0120cheers", - "\u0120assumption", - "\u0120fitness", - "\u00c3\u0143cul", - "\u0120podr", - "\u0120weit", - "\u0120Hind", - "\u0120dign", - "\u0120\u00d0\u00b7\u00d0\u00bd", - "\u0120squad", - "\u0120destro", - "cere", - "shirt", - "immt", - "engers", - "\u0120s\u00c3\u00a4", - "k\u00c5\u0124ad", - "\u0120\u00c8\u013b", - "\u0120occas", - "\u0120\u00ec\u00a4\u0126", - "\u0120processor", - "\u0120DM", - "\u0120Daddy", - "\u0120sooner", - "\u0120straightforward", - "\u0120departments", - "\u0120Chrome", - "\u0120workplace", - "\u0120Python", - "\u0120meng", - "\u0120DAN", - "\u0120Ice", - "\u0120\u00eb\u012a\u012a", - "\u0120Gi", - "\u0120hiring", - "\u0120landed", - "\u0120democratic", - "iedz", - "\u00e3\u0123\u013a\u00e3\u0124\u0125", - "\u0120sev", - "icia", - "\u0120especial", - "\u0120Nous", - "\u0120h\u00c3\u00a4t", - "\u0120bou", - "pert", - "iesz", - "\u00e5\u0133\u0122", - "\u0120vil", - "\u00c5\u013dli", - "\u0120\u00c3\u00aen", - "\u0120losses", - "\u00e9\u0137\u00b7", - "\u0120toast", - "\u0120realm", - "\u0120Austin", - "\u0120Information", - "\u0120resume", - "\u0120chase", - "\u0120salary", - "\u0120\u00eb\u00b6\u0126", - "\u00d0\u00bb\u00d0\u00b8\u00d1\u0129", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d0\u00b5\u00d0\u00b4", - "\u0120Further", - "\u0120caring", - "\u0120vig", - "\u0120valor", - "\u00e8\u00bf\u013b\u00e4\u00b8\u00aa", - "\u0120\u00d1\u0129\u00d0\u00b0", - "\u0120analytics", - "\u0120globe", - "\u0120MAN", - "\u0120nel", - "\u00ec\u013f\u00b4\u00ec\u0137\u00bc", - "\u0141\u00bc", - "\u0120oy", - "\u00ed\u0137\u013a\u00ec\u0126\u00b8\u00ec\u013c\u0136", - "jen", - "\u0120troubles", - "ahaha", - "\u0120churches", - "uet", - "\u0120measurements", - "bil", - "\u00ec\u00bd", - "ifully", - "\u00d0\u00b8\u00d0\u00bd\u00d1\u0125", - "\u0120Wilson", - "\u00a6\u00b4", - "\u0120\u00ed\u012e\u012e", - "\u0120\u00ec\u00b0\u00a8", - "\u0120p\u00c3\u00bablic", - "\u0120Jerusalem", - 
"\u0120nails", - "\u0120spine", - "\u0120hemos", - "\u0120zn", - "quis", - "\u0120Leben", - "\u0120references", - "ITH", - "iper", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d0\u00b1\u00d1\u0131", - "\u00ec\u0123", - "\u0120Wa", - "state", - "\u00a7\u013f", - "\u00e5\u0127\u00b1", - "\u0120Gener", - "\u0120actress", - "\u0120Enjoy", - "\u00e0\u00b9\u0125", - "\u0120\u00d7\u0134", - "\u0120infected", - "\u0120shaking", - "\u0120nick", - "\u00e0\u00b8\u00b8", - "\u0120fot", - "\u0120accomplished", - "uke", - "\u0120sheets", - "\u0120fence", - "\u0120nursing", - "\u0120introducing", - "\u0120feat", - "One", - "TO", - "\u0120clubs", - "\u0120Bruce", - "onge", - "change", - "\u0120Batman", - "\u00e5\u0131\u00b0", - "\u0120Officer", - "\u0120hydro", - "\u0120supplement", - "\u0120cela", - "\u0120longest", - "\u0120competing", - "\u0120conhe", - "giving", - "\u0120brains", - "\u0120loans", - "\u0120wage", - "\u0120Clinton", - "\u0120s\u00c4\u0125", - "aneous", - "\u0120lord", - "\u00d1\u0122\u00d1\u0125\u00d0\u00b6", - "\u0120quiz", - "\u0120stiff", - "\u0120LGB", - "sz", - "ME", - "mare", - "there", - "\u0120n\u00c3\u00a4r", - "\u0120Mand", - "last", - "\u0120dag", - "\u0120halfway", - "\u0120Band", - "\u0120\u00eb\u012d\u00a4\u00ec\u012d\u013e", - "\u0120Aren", - "\u0120ile", - "PN", - "ento", - "\u0120algum", - "\u0120soccer", - "\u0120blocked", - "\u0120Jonathan", - "\u0120sew", - "\u0120Testament", - "\u0120vale", - "\u0120behavi", - "\u00e5\u00a7\u012d", - "\u0120conna", - "ICH", - "\u0120audiences", - "ml", - "ammad", - "\u0120\u00ec\u0124\u00b4\u00ec", - "IGH", - "\u0120races", - "emed", - "\u0120m\u00e1\u00bb\u013bt", - "\u00c3\u00af", - "\u0120overs", - "\u0120declared", - "\u0120sana", - "\u0120Una", - "\u0120\u00d1\u0122\u00d0\u00b5", - "ucks", - "\u0120pairs", - "\u0120ange", - "Ne", - "\u0120ups", - "avy", - "\u00c3\u00b8r", - "reek", - "\u0120behaviors", - "\u0120reflected", - "\u0120priorities", - "\u0120condu", - "\u0120retreat", - "\u0120expenses", - "\u0120\u00eb\u00b4\u0132", - "\u0120triple", - "\u0120\u00ea\u00b5\u012b\u00ec\u0140\u00a5\u00ed\u0140\u012a", - "\u00c3\u00a4lt", - "\u0120indigenous", - "\u0120mining", - "\u0120acceptable", - "\u0120ruin", - "CA", - "uine", - "\u0120pipeline", - "ctic", - "\u00c3\u00aat", - "\u0120\u00d0\u00b2\u00d1\u0123\u00d0\u00b5\u00d0\u00b3\u00d0\u00be", - "\u0120boun", - "\u0120Digital", - "\u0120Boom", - "\u00d1\u0128\u00d0\u00b5", - "\u0120\u00d0\u00bb\u00d1\u0125\u00d1\u0129", - "\u0120asc", - "\u012e\u0122\u00eb\u00a1\u013e", - "\u0120Goodbye", - "\u0120render", - "enez", - "arre", - "\u0120THAT", - "bour", - "ici\u00c3\u00b3n", - "\u00e3\u0124\u0143", - "Every", - "\u0120wires", - "\u0120Parliament", - "nung", - "ateur", - "\u0120Save", - "\u0120Phys", - "\u0120amor", - "\u0120Eve", - "\u0120fright", - "\u0120gamma", - "\u0120micros", - "mitt", - "\u0120Code", - "\u0120Bey", - "pled", - "\u0120\u00d0\u00b8\u00d1\u0123\u00d0\u00bf\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00b7", - "\u00e7\u0139", - "\u00ec\u0125\u012b", - "\u00e5\u00a5\u00b9", - "\u0120monet", - "\u0120Jahre", - "\u0120luxury", - "\u0120deaf", - "\u0120betray", - "\u0120\u00ea\u00b2\u00b0", - "\u00d0\u00b8\u00d0\u00ba\u00d0\u00b8", - "\u0120defeated", - "\u0120undert", - "\u0120weg", - "\u0120cooler", - "\u00e3\u0123\u0137\u00e3\u0124\u0135", - "iami", - "\u00e9\u0124\u0126\u00e6\u013e\u012b", - "\u0120Jessica", - "\u0120Joy", - "\u0120sophistic", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8\u00d0\u00b8", - "\u00f0\u013f\u013a", - "\u0120chili", - "\u0120Type", - 
"\u0120proteins", - "\u0120presenting", - "alia", - "\u00ec\u013c\u00b8", - "\u0120Major", - "\u0120molecule", - "umer", - "\u0120collapse", - "\u0120Anyways", - "\u0120Mountain", - "anted", - "\u00e3\u0122\u0132", - "\u0120\u00d0\u00b2\u00d0\u00b8\u00d0\u00b4\u00d0\u00b5\u00d0\u00be", - "\u00e6\u00b0\u00b4", - "Aud", - "\u0120conqu", - "\u0120voll", - "\u0120knit", - "\u0120membr", - "\u0120Market", - "\u0120dari", - "\u0120calculated", - "\u00d0\u00b3\u00d0\u00b8", - "\u0120shrimp", - "\u0120Mu", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u0124", - "\u0120\u00ec\u013a\u0123\u00ec\u0125\u0123", - "\u0120productivity", - "\u0120cognitive", - "\u0120Heb", - "ictions", - "\u00ea\u00b2\u00bd", - "\u0120cr\u00c3\u00a9", - "f\u00c3\u00b6r", - "\u0120praying", - "ashi", - "\u0120Tik", - "\u00c3\u00b3r", - "wen", - "\u00d1\u012e\u00d1\u0130", - "ixo", - "\u0120(\"", - "\u0120\u00d1\u0124\u00d0\u00b5\u00d0\u00bb", - "\u0120\u00ec\u0138\u00b4\u00eb\u0138\u00a4", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d0\u00b4", - "\u0120Drive", - "\u00e3\u0122\u0133", - "\u0120Equ", - "\u0120equilibrium", - "\u0120describes", - "\u00d0\u00bd\u00d0\u00b5\u00d0\u00b5", - "42", - "\u0120Current", - "yy", - "\u0120absorb", - "\u0120soldier", - "ders", - "\u0120testimony", - "\u0120decline", - "\u013e\u00eb\u00a1\u013e", - "gage", - "\u0120inspire", - "lapping", - "\u0120spinning", - "\u0120slavery", - "\u0120facial", - "\u0120traditions", - "\u00c3\u00a1rios", - "\u0120Hospital", - "\u0120nest", - "\u0120\u00eb\u012a\u0126", - "\u0120toi", - "\u0120fears", - "\u00ec\u0127\u00a8", - "\u0120Muh", - "\u0120graduation", - "\u0120impacted", - "\u0120aunt", - "\u0120Lets", - "\u0120aluminum", - "\u0120dominant", - "\u0120Davis", - "\u0120Navy", - "\u0120compt", - "oples", - "\u0120estava", - "\u00e8\u00a5", - "\u0120scal", - "\u0120preserve", - "\u0120Opp", - "\u0120practically", - "\u0120magnitude", - "\u0120fitting", - "\u0120coordinate", - "\u0120furniture", - "\u0120Famil", - "\u0120explosion", - "\u0120documentary", - "\u0120Script", - "\u0120portray", - "mat", - "\u0120scheduled", - "\u0120dynamics", - "phy", - "aky", - "\u0120UI", - "Che", - "\u0120continuously", - "\u0120Prov", - "\u00e5\u00b0\u0133", - "\u00d1\u0125\u00d0\u00b7", - "rah", - "\u0120gerne", - "proof", - "\u0120secretary", - "\u0120Patreon", - "scream", - "\u0120Kids", - "\u00e1\u00bb\u0135i", - "\u0120kg", - "\u0120uncertainty", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00b6\u00d0\u00b4", - "\u0120mitig", - "\u0120reads", - "\u00e5\u00b7\u00b2", - "\u0120Ru", - "\u0120priest", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00b4", - "\u0120limitations", - "\u0120float", - "600", - "\u0120Toy", - "\u0120Jimmy", - "\u0120offensive", - "eni", - "\u0120Xi", - "\u0120eyebr", - "\u0120Turk", - "\u0120accidentally", - "\u0120ohne", - "\u0120Saud", - "95", - "\u0120Dutch", - "\u00d0\u00b0\u00d0\u00bd\u00d1\u0123", - "\u0120Seattle", - "\u0120\u00eb\u0135\u00b1", - "check", - "k\u00c4\u013b", - "\u0120contributions", - "\u0120beside", - "\u0120quindi", - "\u0120flew", - "\u00e6\u0139\u00b6", - "\u00d8\u00b0\u00d8\u00a7", - "\u0120LO", - "\u0120waist", - "\u0120EV", - "\u0120holidays", - "jon", - "\u0120misunder", - "\u00d1\u0131\u00d0\u00bd", - "\u0120bout", - "\u0120dimin", - "\u00e1\u00ba\u00bd", - "\u00c3\u00b3l", - "\u0120Grace", - "\u0120inputs", - "\u0120deny", - "\u0120forming", - "\u0120Bild", - "\u0120adequ", - "\u0120folk", - "\u0120rejected", - "semb", - "\u0120frustrated", - "open", - "\u0120Better", - "ilon", - 
"\u0120towel", - "\u0120differential", - "\u0120sacred", - "\u0120sail", - "\u00e9\u0129\u012e", - "entimes", - "\u0120gentleman", - "\u0120iconic", - "\u0120comparing", - "\u0120sagt", - "\u0120texts", - "\u0120grandma", - "\u0120rolls", - "\u0120contents", - "\u00e4\u00b8\u012f\u00e5\u00a5\u00bd", - "\u00d0\u00be\u00d1\u0123\u00d1\u0123", - "\u0120suspension", - "roit", - "\u00a6\u00bc", - "\u0120assez", - "\u0120dort", - "\u0120Math", - "\u0120Victor", - "\u0120JavaScript", - "\u00e4\u00b8\u012f\u00e5\u00b0\u012f", - "\u0120enhan", - "\u00c5\u013b", - "\u0120Bush", - "\u0120promotion", - "\u0120kin", - "\u0120monsters", - "\u0120Colorado", - "\u0120\u00ce\u00b2", - "\u00ed\u0137\u00b4\u00ec\u013c\u0136", - "\u00e6\u0143\u00a3", - "ifferent", - "\u0120naked", - "\u0120prod", - "etics", - "\u0120Woman", - "\u0120treatments", - "\u0120estoy", - "v\u00c3\u00a9", - "\u0120lifting", - "\u0120yapt", - "\u0120Rober", - "\u0120\u00ec\u00b9\u013e", - "\u0120substitute", - "aku", - "ridge", - "\u0120\u00ea\u00b1\u00b0\u00eb", - "\u0120responded", - "\u0120b\u00c3\u00a9", - "\u0120Engineer", - "\u0120transferred", - "\u00eb\u00b2", - "\u0120haber", - "oop", - "\u0120WE", - "\u0120vest", - "\u0120forty", - "\u0120DS", - "\u01202004", - "\u0120coaching", - "nom", - "\u0120Bab", - "\u0120nossa", - "\u0120Jake", - "\u0120gy", - "\u0120deleg", - "\u0120\u00ec\u0140\u0142", - "\u0120\u00d0\u00ba\u00d1\u0122\u00d0\u00b0\u00d1\u0123", - "\u0120standpoint", - "\u0120disad", - "\u0120artwork", - "Ad", - "illo", - "\u0120\u00c4\u0133\u00c6\u00b0\u00e1\u00bb\u00a3c", - "\u0120Prom", - "\u0120Lib", - "\u0120criticism", - "\u0120contacts", - "\u00d1\u0122\u00d0\u00b0\u00d0\u00bc", - "\u0120achievement", - "\u00d0\u0136\u00d0\u00b0", - "\u0120dissol", - "\u0120Vegas", - "\u0120streams", - "\u0120Kent", - "\u0120\u00d8\u00b9\u00d9\u0126\u00d9\u012b", - "\u0120radius", - "\u0120sucks", - "\u0120Ach", - "\u0120fi", - "oust", - "\u0120\u00d0\u00bb\u00d1\u0130\u00d0\u00b4\u00d0\u00b8", - "\u0120palette", - "\u0120Haz", - "\u0120Anthony", - "\u0120tema", - "\u0120Cos", - "\u0120safer", - "\u00ce\u00b1\u00cf\u0124", - "\u0120contrad", - "\u0120maior", - "\u0120inflation", - "\u0120Silver", - "\u0120attending", - "\u00ed\u0137\u013e\u00ed\u0127\u012e", - "arto", - "\u0120applauding", - "\u0120computing", - "\u0120Hat", - "\u00e6\u00bb", - "know", - "makers", - "\u0120conoc", - "\u0120educated", - "\u0120modified", - "\u0120inclusion", - "mental", - "\u0140\u0132", - "isia", - "\u0120\u00cf\u0122\u00ce\u00bf\u00cf\u0127", - "\u0120aun", - "\u0120Ireland", - "\u0120k\u00c3\u00b6", - "\u0120compliance", - "\u0120inspiring", - "\u00d0\u00b8\u00d1\u0124\u00d0\u00b5\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be", - "\u0120dispos", - "\u00ec\u00b0\u00a8", - "\u0120wip", - "rical", - "rawd", - "\u0120tres", - "\u0120mobil", - "olutions", - "BO", - "\u0120bounce", - "\u0120assumed", - "\u0120Medical", - "\u0120fiscal", - "\u0120ng\u00c6\u00b0\u00e1\u00bb\u013fi", - "itionally", - "\u0120stolen", - "\u0120BM", - "\u0120mechanisms", - "\u00ce\u00b5\u00ce\u00af", - "\u0120qualified", - "\u0120\u00ec\u0140\u0132\u00eb", - "ughters", - "\u0120HIV", - "\u0120Lots", - "\u0120servers", - "\u0120carr", - "\u0120Together", - "\u0120attracted", - "\u0120kr", - "\u00e6\u012a\u0133\u00e6\u013a\u00af", - "thur", - "inin", - "\u0120Half", - "\u00c8\u013d", - "\u0120Pap", - "\u0120reminded", - "ALL", - "\u0120helmet", - "\u0120bottles", - "\u0120professors", - "\u0120seine", - "\u00c5\u0124\u00c4\u0127", - "\u00e3\u0125\u0131", - 
"\u0120\u00ea\u00b1\u00b0\u00ec\u0137\u00bc", - "\u0120\u00d7\u00a2\u00d7\u013e", - "fun", - "\u0120Bird", - "\u0120fighter", - "\u0120\u00eb\u0136\u00b0\u00eb", - "\u0120Tool", - "\u0120tin", - "inois", - "\u00eb\u00b6\u0126", - "\u00d7\u013b\u00d7\u0141", - "\u0120CAR", - "\u00e5\u0132\u012f", - "irsty", - "\u0120outdoor", - "\u0120NS", - "\u00e3\u0127\u0130", - "ffen", - "\u0120lud", - "Hello", - "\u0120roller", - "iele", - "\u0120Poland", - "\u0120apa", - "exp", - "\u0120certificate", - "\u0120Town", - "\u00d0\u00b0\u00d1\u0130\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "ilde", - "\u0120determin", - "PR", - "\u0120freeze", - "\u0120mainstream", - "\u0120objectives", - "blo", - "\u0120takie", - "\u00e5\u0135\u012a\u00e5\u0135\u012a", - "\u0120\u00eb\u00b0\u0136\u00eb\u00a1\u013e", - "elet", - "\u0120IV", - "\u0120Fast", - "\u0120dere", - "emp", - "\u0120Dra", - "\u0120\u00ec\u0140\u012a\u00ec\u0139\u012a", - "\u0120discrimination", - "\u0120\u00ce\u00b5\u00ce\u00af\u00ce\u00bd\u00ce\u00b1\u00ce\u00b9", - "necess", - "\u00e6\u00ae", - "\u00c4\u00b1\u00c4\u0141\u00c4\u00b1", - "\u0120posting", - "wi\u00c5\u013dcie", - "\u0120lub", - "\u0120olive", - "\u0120rim", - "\u0120modeling", - "\u0120a\u00c3\u00b1o", - "\u0120Pakistan", - "\u0120overl", - "\u0120inflam", - "NE", - "\u00ec\u0139\u0132\u00ea\u00b2\u012e", - "\u0120attended", - "\u0120dealt", - "\u0120Alt", - "\u0120Lincoln", - "\u0120awake", - "\u0120filters", - "\u0120Within", - "czywi\u00c5\u013dcie", - "\u0120s\u00c3\u00bb", - "\u0120Johnny", - "\u0120integrity", - "\u0120isolation", - "\u0120Easy", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bd", - "\u0120Alice", - "\u0120smiling", - "enix", - ",...", - "\u00ce\u00b6", - "\u0120begun", - "\u0120jewel", - "\u0120conventional", - "\u0120statist", - "\u0120handed", - "\u0120irre", - "\u0120prohib", - "\u0120satellite", - "\u00e9\u00a6\u013b", - "\u0120Indust", - "\u0120traged", - "\u0120trava", - "\u0120ihm", - "\u0120cruel", - "\u0120Agora", - "\u0120Doc", - "\u0120zones", - "\u0120mall", - "\u0120tray", - "\u00d7\u0137\u00d7\u0142", - "\u0120irrit", - "\u0120kans", - "\u0120Beat", - "udge", - "ielle", - "\u0120trusted", - "\u0120bikes", - "\u0120\u00d1\u0125\u00d0\u00bf", - "\u0120Member", - "wick", - "\u0120creators", - "\u0120heritage", - "indistinct", - "\u0120resur", - "ennen", - "Come", - "\u0120firing", - "\u0120Bueno", - "\u0120\u00d0\u00a2\u00d0\u00be", - "ikan", - "ettes", - "\u0120kes", - "\u0120trips", - "\u0120divorce", - "\u0120Kl", - "\u0120consol", - "keep", - "\u00ea\u00b8\u00b0\u00ea\u00b0\u0122", - "\u0120Report", - "\u0120hosting", - "\u0120diamond", - "\u0120complic", - "\u0120helicop", - "\u0120depuis", - "ds", - "\u0120Chan", - "\u00d1\u0131\u00d0\u00bb", - "\u0120scissors", - "ilation", - "\u0120proportion", - "ERE", - "\u0120\u00d9\u012a\u00d8\u00a7\u00d9\u0126", - "inta", - "\u0120muchas", - "uation", - "itis", - "\u00e6\u012c\u012c", - "\u00d1\u0131\u00d1\u012b", - "\u0120niin", - "\u0120emphasize", - "uela", - "\u0120producers", - "\u0120rze", - "\u00c3\u00a4nder", - "ETH", - "\u00e6\u00ba", - "\u0120constitu", - "\u00e5\u013d\u00bd", - "\u0120performances", - "istle", - "gov", - "\u0120Liter", - "\u0120incorporate", - "\u0120educate", - "\u0120Nin", - "\u00ec\u00aa\u00bd", - "\u00d9\u0129\u00d9\u0127", - "eleration", - "\u00d7\u0137\u00d7\u0133", - "\u0120ya\u00c5\u0141", - "orous", - "\u0120Cas", - "\u0120grants", - "\u00eb\u012c\u00a5", - "amel", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0142\u0129\u00ea\u00b2\u012e", - "\u0120Este", - 
"\u00d1\u0127\u00d0\u00be\u00d0\u00b4\u00d0\u00b8\u00d1\u0124", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0123\u00d0\u00bb\u00d0\u00b5", - "\u0120gent", - "\u0120focuses", - "alities", - "\u0120Rh", - "\u00eb\u00b3\u00b4", - "\u00e6\u00b0\u0133", - "\u0120Dance", - "rr", - "\u0120amer", - "\u0120utilize", - "\u0120l\u00c3\u0143", - "\u0120Among", - "\u0120pregnancy", - "\u0120loops", - "\u00d0\u00b0\u00d0\u00bb\u00d0\u00be\u00d1\u0123\u00d1\u012e", - "\u0120Moh", - "\u0120catching", - "\u0120glob", - "\u0120ajud", - "\u0120[?", - "\u0120Anal", - "looking", - "\u0120surfaces", - "\u0120progressive", - "\u0120viral", - "08", - "\u00ce\u00be", - "KA", - "\u0120\u00c5\u00bcy", - "\u0120picks", - "annon", - "\u0120bulk", - "\u0120Ross", - "\u0120describing", - "\u0120Gel", - "\u0120locally", - "\u0120endless", - "\u0120massage", - "\u0120cleaned", - "\u0120traveled", - "\u00d0\u00b5\u00d0\u00bd\u00d1\u012d", - "\u0120sentiment", - "igma", - "\u0120Nas", - "\u0120chemicals", - "\u0120righteous", - "\u0120Magic", - "\u0120relates", - "\u0120trucks", - "\u01201960", - "\u00e5\u012a\u00a5", - "\u0120appet", - "\u0120snacks", - "\u0120Summer", - "\u0120y\u00c3\u00bcz", - "\u0120pris", - "\u0120Mexican", - "\u0120transparen", - "\u0120minority", - "\u0120verte", - "\u0120lassen", - "46", - "\u00d0\u00bb\u00d0\u00b5\u00d0\u00ba", - "\u00c3\u00a9p", - "\u0120\u00d1\u0126\u00d0\u00b8\u00d0\u00bb\u00d1\u012e", - "\u0120iyi", - "\u0120span", - "\u00ed\u0137\u013a\u00ec\u00a7\u0122", - "\u0120indicated", - "quar", - "\u0120scholarship", - "\u0120LGBT", - "\u0120historically", - "\u00c3\u00b3\u00c5\u0124", - "\u0120minist", - "\u0120penet", - "\u0120Rap", - "\u0120conservation", - "\u00e7\u013d\u00b4", - "\u0120Honey", - "\u0120Bei", - "idel", - "\u0120responsibilities", - "\u0120messy", - "\u0120Except", - "ORE", - "\u0120initiatives", - "\u0120junior", - "\u0120designers", - "\u0120exploration", - "\u0120sponsor", - "\u0120mobility", - "\u0120integ", - "lando", - "\u0120bark", - "\u0120indicates", - "\u00e0\u00b6", - "\u0120employer", - "\u00e5\u00ae\u012b", - "\u0120cousin", - "\u0120boiling", - "\u0120chrom", - "\u0120\u00c3\u00a7al", - "\u0120perpet", - "\u0120contained", - "\u0120parks", - "\u00d0\u00ab", - "\u0120Engineering", - "Please", - "\u0120Starting", - "hero", - "\u0120lawyers", - "\u00e8\u00a5\u00bf", - "\u0120zd", - "\u0120franchise", - "rage", - "\u0120intuit", - "\u0120GL", - "reach", - "\u0120Elle", - "\u0120nh\u00c6\u00b0", - "\u0120Nord", - "\u0120bean", - "07", - "\u0120pleasant", - "\u00e5\u00bd\u0135", - "viron", - "\u0120gradient", - "zus", - "\u0120EM", - "\u0120essay", - "\u00ec\u0139\u0132\u00ec\u013c\u0136", - "\u00e1\u00ba\u00bfn", - "nu", - "\u00e1\u00bb\u00ab", - "\u0120\u00c3\u012bs", - "\u0120denomin", - "\u0120Girls", - "\u0120personnes", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00a3", - "bild", - "\u0120Stat", - "\u0120compliment", - "\u0120Kate", - "\u0120optimal", - "\u0120hid", - "\u00d8\u00af\u00d9\u012c", - "\u0120quicker", - "wall", - "En", - "INE", - "???", - "\u00ec\u00b2\u00b4", - "\u0120Action", - "\u00e5\u0141", - "\u0120penalty", - "\u0120Kaz", - "'?", - "\u0120cried", - "\u0120canvas", - "fte", - "\u0120exclud", - "\u00b8\u00eb\u00a1\u013e", - "\u0120emphasis", - "\u0120enzy", - "\u0120Hou", - "\u0120overseas", - "\u00c3\u0143amos", - "\u00e5\u00b8\u00ab", - "\u00c3\u00b6glich", - "\u0120headphones", - "cn", - "\u0120Age", - "\u0120akan", - "\u0120characteristic", - "\u00ed\u0137\u013a\u00eb\u00a9\u00b4", - "gets", - "\u0120\u00eb\u00b6\u012a", - 
"\u0120rival", - "\u0120borders", - "emente", - "em\u00c3\u00a1s", - "\u0120yol", - "\u0120compe", - "enders", - "\u00c4\u00b1ndan", - "\u0120m\u00c3\u00b6glich", - "\u0120bubbles", - "natural", - "\u0120armed", - "\u0120elabor", - "\u0120\u00ec\u013f\u00b4\u00eb\u00b2\u012a", - "\u0120washed", - "\u00ce\u00bf\u00cf\u0127\u00ce\u00bc\u00ce\u00b5", - "\u00e8\u00ab\u012d", - "\u0120flavors", - "\u0120existe", - "\u0120prest", - "\u0120Thema", - "\u00d0\u00be\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u0123", - "eron", - "UE", - "eri", - "\u0120concer", - "\u0120aix\u00c3\u00b2", - "\u00e5\u0127\u00a9", - "\u0120protective", - "\u0120\u00d0\u00b7\u00d0\u00bd\u00d0\u00b0\u00d1\u0130", - "\u0120\u00eb\u0124\u0142", - "\u0120III", - "\u0120meer", - "\u0120Shop", - "lli", - "\u0120Order", - "\u0120MY", - "\u0120Ghost", - "\u00e3\u0124\u0124\u00e3\u0123\u0128", - "adel", - "\u0120stole", - "\u0120releasing", - "\u0120Comment", - "\u0120trains", - "\u00eb\u00aa\u0127", - "\u0120wissen", - "ensed", - "\u0120descend", - "\u0120fier", - "\u0120radi", - "\u0120persu", - "\u00e7\u00a2", - "\u0120\u00d0\u00bc\u00d0\u00bd", - "\u0120Dest", - "\u0120worries", - "itet", - "bas", - "\u0120stab", - "name", - "oric", - "\u0120Close", - "\u0120alumni", - "\u0120Self", - "ffe", - "itating", - "atherine", - "\u0120Rights", - "\u0120ellos", - "\u0120warrant", - "\u0120nerve", - "\u0120vegetable", - "\u0120Teil", - "\u0120\u00ea\u00b0\u013b\u00ec\u013f\u00b4", - "RY", - "\u0120sustainability", - "\u0120steht", - "\u0120brid", - "ada\u00c5\u0141", - "\u0120tv", - "\u0120duration", - "\u0120pessoa", - "\u0120metrics", - "\u0120adam", - "cas", - "\u00d0\u00b0\u00d1\u0122\u00d0\u00b8", - "\u0120evident", - "\u0120displayed", - "\u00d8\u00a7\u00d8\u00a6", - "\u0120reck", - "\u0120Buddha", - "\u0120dele", - "\u0120Diego", - "osph", - "\u0120bla", - "\u0120Mik", - "ulator", - "\u01202001", - "\u0120promoting", - "ych", - "\u0120EX", - "\u0120lastly", - "\u0120outline", - "\u0120spirits", - "\u0120veux", - "\u0120subtract", - "\u0120\u00c5\u0141imdi", - "\u0120pins", - "\u0120burger", - "\u0120molto", - "\u0120hab\u00c3\u0143a", - "\u0120\u00eb\u00b0\u013a", - "igu", - "erst", - "\u0120nen", - "\u0120bacon", - "itious", - "\u0120carries", - "\u0120promises", - "nde", - "\u0120Left", - "\u0120Lim", - "\u00e6\u00a3", - "\u012044", - "\u0120careers", - "\u0120\u00ec\u00a3\u00bc\u00eb", - "\u0120speeds", - "qu\u00c3\u00a9", - "mad", - "market", - "isme", - "\u01202003", - "\u0120recess", - "\u0120JUD", - "\u0120racist", - "\u0120Schl", - "\u0120parler", - "\u0120otros", - "ishes", - "\u0120converted", - "aaaa", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b8", - "\u0120Ark", - "\u0120Chance", - "\u0120elementary", - "\u00ce\u00b5\u00ce\u00bd", - "inks", - "Interviewer", - "\u0120freely", - "alah", - "\u0120\u00eb\u012d\u00a4\u00eb\u00a5\u00b8", - "\u0120requested", - "\u0120torque", - "no\u00c5\u013dci", - "oured", - "\u0120Staff", - "\u0120stain", - "\u0120Alan", - "\u0120vere", - "\u0120Winter", - "\u0120defect", - "iedy", - "\u0120beats", - "\u0120h\u00c3\u00a1", - "umn", - "oons", - "itudes", - "\u0120seit", - "oly", - "\u0120reserv", - "\u0120extr", - "\u0120physician", - "visor", - "\u0120handful", - "\u0120Nations", - "\u0120\u00ec\u00a2\u012d\u00ec\u013f\u0122", - "uccess", - "\u0120upstairs", - "\u0120Square", - "\u0120hein", - "\u0120Season", - "olis", - "\u0120prince", - "\u0120defensive", - "\u00e7\u00bd", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d1\u0123\u00d1\u0124", - "\u00d1\u0138\u00d0\u00b9", - 
"\u0120\u00d8\u00a7\u00d9\u0128", - "umble", - "\u00ea\u00b9\u012e\u00ec\u013c\u0136", - "\u0120assass", - "\u0120circular", - "\u0120qualities", - "\u0120hmm", - "\u0120blown", - "\u0120Liz", - "\u0120Kur", - "\u0120SA", - "\u0120findings", - "\u0120colours", - "\u0120delle", - "\u0120IR", - "\u0120Ath", - "\u0120Dub", - "\u0120Ox", - "\u0120\u00d8\u00ae", - "\u0120pockets", - "\u0120grill", - "\u0120switching", - "\u0120preferred", - "\u0120Wales", - "\u0120exemplo", - "\u0120chopped", - "\u0120vaccination", - "\u0120neuro", - "\u0120specify", - "ivos", - "\u0120ser\u00c3\u00a1", - "\u0120zie", - "\u0120\u00e0\u00ae\u00ae", - "\u0120resulting", - "\u0120Ugh", - "\u0120messed", - "CD", - "\u0120paar", - "\u0120comer", - "\u0120couch", - "\u0120Festival", - "\u012049", - "vous", - "zens", - "\u00e7\u00a8\u00ae", - "\u0120Kennedy", - "\u0120Ts", - "\u0120\u00eb\u00b3\u00b4\u00ec\u0139", - "\u0120demonstration", - "\u0120unto", - "\u0120frustrating", - "\u0120laboratory", - "\u0120egy", - "\u0120beautifully", - "\u0120\u00ec\u0140\u00ac\u00eb", - "\u0120algu", - "\u0120\u00c3\u00b6yle", - "\u00e4\u00bd\u0142\u00e7\u013e\u012d", - "\u0120PH", - "\u0120fortune", - "\u0120cleaner", - "\u0120Robin", - "\u0120saus", - "\u0120Geld", - "\u0120kat", - "obs", - "\u0120olur", - "\u0120matt", - "\u0120questa", - "\u0120suggestion", - "encer", - "\u00d0\u00be\u00d1\u0123\u00d1\u0124", - "\u0120radar", - "\u0120\u00ec\u0140\u00a1", - "isha", - "\u00e0\u00ae\u00a8", - "\u00e3\u0124\u0135\u00e3\u0123\u00aa", - "jes", - "\u0120veel", - "\u00ec\u0124\u00b0", - "\u0120authors", - "\u00e3\u0122\u0130", - "plan", - "\u0120collaborative", - "\u0120instinct", - "\u0120farming", - "auge", - "Edu", - "\u0120membership", - "\u0120simultaneously", - "\u0120bake", - "\u0120k\u00c3\u00a4", - "\u0120lectures", - "\u00d1\u0129\u00d0\u00b5\u00d1\u0123", - "\u0120prendre", - "\u0120collaps", - "\u0120Saya", - "\u0120Fut", - "\u0120yog", - "\u0120Rather", - "\u00d8\u00b1\u00d9\u012c", - "\u0120camps", - "\u00d0\u00be\u00d0\u00bb\u00d0\u00be\u00d0\u00b4", - "\u0120simulation", - "\u0120Mak", - "Laughs", - "\u0120grey", - "\u0120sentences", - "yen", - "\u0120Unless", - "Je", - "\u0120Satan", - "\u0120\u00d1\u0124\u00d0\u00b0\u00d0\u00ba\u00d0\u00b6\u00d0\u00b5", - "\u0120NA", - "\u0120bron", - "\u0120?]", - "\u0120souls", - "\u0120lightning", - "\u0120imagined", - "\u0120czyli", - "psilon", - "etta", - "\u0120believing", - "\u0120strongest", - "\u0120CON", - "\u0120quelques", - "\u0120immigrants", - "\u0120wallet", - "\u00e9\u0122\u013b\u00e6\u013a\u00af", - "\u0120Jersey", - "\u0120implications", - "\u0120forb", - "\u00e3\u0122\u0131", - "\u0120unbelievable", - "\u00d8\u00a7\u00d8\u00a1", - "\u0120operational", - "\u00c3\u00bcs", - "\u0120GM", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0141\u00b0\u00eb\u012f\u00b0", - "\u0120gracias", - "\u0120entend", - "\u0120Regard", - "rob", - "\u0120\u00d1\u0124\u00d0\u00b5\u00d1\u0127", - "\u00e8\u0131", - "\u0120Revolution", - "\u0120waar", - "\u0120Biz", - "theless", - "\u0120sponsored", - "quier", - "\u0120\u00ec\u013f\u00bc\u00eb", - "\u0120tek", - "\u0120\u00eb\u0132\u0142", - "igkeit", - "\u0120Luck", - "\u0120Certainly", - "\u0120toll", - "\u0120\u00d0\u00bd\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d0\u00b3\u00d0\u00be", - "\u0120Money", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00be\u00d1\u0122", - "\u0120Double", - "\u0120Wolf", - "\u0120chunk", - "\u00ce\u00ac\u00ce\u00bd", - "it\u00c3\u00a9s", - "oning", - "Mar", - "\u0120grandes", - "\u0120collections", - "\u0120Europa", - 
"\u0120\u00d0\u00b0\u00d1\u0122", - "\u0120\u00e2\u0122\u012d\u00e2\u0122\u012d\u00e2\u0122\u012d", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0141\u00ac\u00eb\u00a9\u00b4", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u012c", - "\u0120\u00e3\u0123\u00aa", - "\u0120\u00ec\u012d\u013e\u00ea\u00b0\u0126", - "\u0120Custom", - "\u0120\u00ec\u00b2\u013a", - "\u00d1\u0138\u00d0\u00bb\u00d1\u012e", - "\u0120individually", - "\u00ed\u0139", - "\u0120dozen", - "\u0120owe", - "\u0120Victoria", - "\u00e5\u0131\u00af\u00e8\u0125\u00bd", - "\u0120beet", - "urb", - "\u0120analog", - "i\u00c3\u00a7\u00c3\u00a3o", - "\u0124\u013e", - "soever", - "\u0120modo", - "\u0120subscribed", - "\u00ec\u0140\u00ac", - "\u0120entities", - "\u00e7\u012b\u0129", - "\u0120closet", - "\u0120responding", - "\u0120printer", - "\u0120Stephan", - "\u0120by\u00c5\u0124", - "\u0120Dom", - "\u0120Fern", - "\u0120Pier", - "\u0120wi\u00c4\u013bc", - "\u0120hence", - "\u0120modules", - "\u00e3\u0125\u00ac", - "\u0120\u00eb\u0136\u00b1", - "\u0120Danny", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d0\u00b1\u00d0\u00b5", - "\u0120vad", - "\u0120\u00ec\u0139\u0126", - "\u0120sous", - "\u0120sphere", - "BY", - "\u0120Ped", - "igned", - "\u0120wheat", - "\u0120unders", - "\u0120evolve", - "\u0120declar", - "\u0120lightly", - "\u0120identifying", - "\u00e6\u0126\u0131\u00e6\u0122\u013f", - "\u0120legendary", - "\u0120genuine", - "\u0120grind", - "\u0120Une", - "geben", - "\u0120bicy", - "\u0120jumps", - "\u0120province", - "zi\u00c4\u013b", - "\u0120\u00d7\u0132\u00d7\u0142\u00d7\u013b", - "\u0120hoc", - "\u0120\u00d0\u00b1\u00d0\u00bb", - "\u0120Grad", - "\u0120revenge", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00aa", - "ooh", - "\u00e6\u012d\u013e", - "\u00d0\u00b0\u00d1\u0128\u00d0\u00b8\u00d0\u00b8", - "\u00e5\u00b9\u00b3", - "\u0120electro", - "\u0120\u00eb\u0132\u0132", - "\u00e3\u0123\u00a7\u00e3\u0123\u00af", - "\u0120fals", - "riel", - "oker", - "\u0120Excellent", - "\u0120Morgan", - "\u0120brick", - "\u0120substantial", - "\u0120pollution", - "\u0120T\u00c3\u00bcr", - "\u0120Evet", - "\u0120lung", - "\u00e3\u0123\u0138", - "\u00d7\u013b\u00d7\u00a9", - "ommes", - "\u0120realizing", - "\u0120humble", - "\u0120Lock", - "\u0120bod", - "\u0120\u00ec\u0138\u00b8", - "\u0120peers", - "uzz", - "\u0120embedded", - "\u0120claro", - "\u0120aggreg", - "\u0120employers", - "\u0120Raj", - "\u0120\u00e3\u0123\u00a8", - "\u0120Yi", - "\u0120jeu", - "aters", - "\u0120strikes", - "nos", - "autres", - "dr", - "opher", - "\u0120Apparently", - "\u00ed\u013a\u0126", - "\u0120infant", - "\u00d8\u00a7\u00d8\u00a8", - "\u00d1\u0124\u00d1\u012d", - "\u00ed\u013d", - "\u00da\u00af", - "\u0120redes", - "aca\u00c4\u0141\u00c4\u00b1m", - "\u0120DAVID", - "\u0120Chicken", - "\u0120perspectives", - "\u0120viewer", - "\u0120shar", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b8\u00d0\u00b7", - "ligt", - "eros", - "itable", - "\u00d0\u00b8\u00d0\u00bb\u00d0\u00be\u00d1\u0123\u00d1\u012e", - "\u0120dif\u00c3\u0143", - "\u00b4\u00eb\u012f\u00b0", - "\u0120retired", - "\u0120thats", - "zenie", - "beiten", - "\u0120mycket", - "\u0120Rab", - "\u0120inflamm", - "\u00ec\u00b0\u00ae", - "\u0120dum", - "\u0120daddy", - "\u00e6\u013e\u0141", - "\u0120immers", - "\u0120playlist", - "\u00e0\u00af\u0128", - "\u0120traum", - "\u0120refuse", - "step", - "\u00e0\u00ae\u013c", - "cup", - "\u0120pops", - "rimin", - "ay\u00c4\u00b1m", - "\u0120ald", - "\u0120unnecess", - "\u0120dah", - "\u0120Irish", - "\u0120compr", - "la\u00c5\u0141", - "TP", - "\u0120translated", - "Sc", - 
"ce\u00c4\u0141im", - "\u00b4\u0132", - "\u0120drei", - "\u0120\u00d0\u00bb\u00d1\u0130\u00d0\u00b4\u00d0\u00b5\u00d0\u00b9", - "\u0120quiero", - "\u0120hele", - "zlich", - "\u0120apples", - "\u0120districts", - "\u0120credits", - "\u0120asp", - "\u0120\u00eb\u012d\u00a8", - "oral", - "\u00e5\u00bd\u00b1", - "\u0120stepping", - "\u0120Va", - "\u0120gains", - "65", - "\u0120nuestra", - "eday", - "assador", - "\u0120Lind", - "\u0120crops", - "ciendo", - "igue", - "\u0120bana", - "Am", - "\u0120pent", - "\u0120addiction", - "\u0120packaging", - "\u00c3\u00a4d", - "\u00aa\u00a8", - "\u0120perqu\u00c3\u00a8", - "\u0120campaigns", - "\u0120steep", - "\u0120neue", - "\u0120embarrassed", - "\u0120distinction", - "itzer", - "\u00e5\u0133\u012c", - "\u0120registration", - "\u0120llam", - "\u0120Almighty", - "liest", - "\u0120uz", - "nak", - "\u00e7\u00ba", - "\u0120teraz", - "iamente", - "\u0120transactions", - "\u0120c\u00c3\u00b4t", - "\u0120switched", - "\u0120combo", - "\u0120prayers", - "\u0120internship", - "\u0120addresses", - "\u0120charity", - "\u0120WOO", - "\u0120bait", - "\u00e8\u00bf\u0129", - "\u0120\u00ef\u00bf\u00bd", - "\u0120fica", - "\u0120Tyler", - "aru", - "\u0120atoms", - "\u0120Level", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d0\u00bc", - "\u0120fame", - "ulk", - "\u0120teaches", - "\u0120rebuild", - "\u00d0\u00b5\u00d0\u00b4\u00d1\u012e", - "\u0120Indonesia", - "ushi", - "\u0120Short", - "\u0120ensuring", - "fs", - "ele", - "\u0120marginal", - "\u0120conclude", - "amt", - "\u0120verify", - "\u0120McDonald", - "\u0120skal", - "\u0120reconst", - "\u0120Mann", - "\u0120basement", - "\u0120transformed", - "\u0120occasionally", - "zone", - "\u0120Dans", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00ba\u00d0\u00be\u00d0\u00b9", - "\u0120diagnosed", - "\u0120\u00cf\u0126\u00ce\u00b1", - "\u0120commands", - "\u0120presidential", - "\u0120abb", - "\u0120bracket", - "\u0120Lem", - "\u00c3\u00a5ng", - "\u0120favorites", - "\u0120revol", - "\u0120\u00ed\u012c\u00b9", - "\u0120harass", - "\u00e9\u0127", - "\u0120cleans", - "st\u00c3\u00a4nd", - "\u0120knocked", - "\u0120peoples", - "\u0120musicians", - "\u0120mutual", - "\u0120Cold", - "88", - "zej", - "atie", - "\u0120Honor", - "\u0120obsessed", - "\u0120MUSIC", - "\u0120Break", - "\u00c3\u00bang", - "\u0120modify", - "\u0120s\u00c3\u00b6yle", - "\u0120\u00d7\u0140\u00d7\u0136", - "\u0120Online", - "fo", - "\u0120Miller", - "\u0120liking", - "\u0120inhab", - "\u0120gratitude", - "\u0120Journal", - "arness", - "John", - "\u0120Git", - "\u00e5\u012b\u013d", - "\u0120sincere", - "\u0120Sci", - "\u0120Eli", - "\u0120symbols", - "\u0120manually", - "\u00ce\u00b5\u00cf\u0124", - "\u0120\u00d0\u00b2\u00d1\u0138\u00d0\u00b4", - "\u0120Fat", - "\u0120labels", - "\u0120sophisticated", - "umps", - "\u0120releases", - "\u012047", - "\u0120OM", - "\u00ea\u00b0\u0122\u00eb", - "\u0120Bien", - "\u0120Ref", - "\u00e8\u00a8\u013a", - "\u0120Sta", - "\u0120Egg", - "\u0120indicator", - "pson", - "\u0120nas\u00c4\u00b1l", - "Right", - "\u0120convey", - "\u0120knot", - "\u0120connects", - "ulas", - "\u0120preced", - "\u0120inequality", - "amiento", - "\u0120reply", - "OY", - "\u0120dismiss", - "\u0120\u00eb\u0132\u013e", - "\u00e7\u0126\u00a1", - "\u0120\u00d1\u0127\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d1\u012a\u00d0\u00be", - "\u0120m\u00c3\u00a9d", - "\u0120randomly", - "\u0120Ont", - "uard", - "\u0120pulls", - "\u0120\u00d1\u0124\u00d0\u00b5\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d1\u012e", - "\u0120Need", - "\u0120Soft", - 
"\u0120strengths", - "\u0120goed", - "umen", - "\u00e6\u0143\u00bb", - "\u0120\u00ed\u0130\u00b8", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00b1", - "\u0120clarity", - "\u0120Ai", - "\u0120balloon", - "\u0120Pand", - "\u0120\u00ec\u0137\u0126\u00eb\u012d", - "\u0120shiny", - "\u0120smallest", - "onia", - "hill", - "oting", - "\u0120eing", - "\u0120merely", - "\u0120seus", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00bf", - "\u0120\u00ed\u0128\u00b5", - "\u0120guides", - "\u0120specialist", - "\u0120steak", - "\u00e3\u0124\u012a\u00e3\u0123\u0128", - "\u0120migration", - "quele", - "\u0120ruined", - "\u0120pupp", - "\u00e5\u00a5\u00b3", - "\u0120kend", - "angan", - "\u0120palm", - "\u0120unfair", - "\u0120zm", - "\u0120DV", - "chester", - "\u00d0\u00b8\u00d1\u0130", - "\u0120ooh", - "erg", - "ATH", - "\u00b0\u00a9", - "\u00e5\u0135\u00aa", - "rison", - "\u0120involving", - "\u0120partly", - "an\u00c3\u00a7ais", - "\u0120vow", - "\u0120prominent", - "\u0120cryst", - "iba", - "\u0120deserves", - "\u0120overt", - "\u0120sensit", - "\u0120Whe", - "\u0120tighten", - "\u0120intimid", - "\u0120aliment", - "will", - "\u0120strengthen", - "\u0120Tan", - "\u00e5\u0131\u012a", - "\u00e3\u0123\u0139\u00e3\u0123\u00be\u00e3\u0123\u013b", - "oni", - "\u0120Mun", - "\u0120proph", - "\u0120rehears", - "\u0120Kle", - "\u0120veces", - "\u0120wondered", - "oki", - "\u0120senses", - "\u00b4\u00ec\u012d", - "\u00c6\u00b0\u00e1\u00bb\u013d", - "\u0120\u00c8\u013bi", - "\u0120muchos", - "\u0120watches", - "ortunate", - "\u0120Juan", - "\u00ec\u0140\u0138\u00ec\u0137\u0126", - "\u00d1\u0122\u00d0\u00b5", - "ei", - "ionen", - "\u0120experimental", - "\u0120daughters", - "\u00e0\u00b8\u013d", - "\u0120mentally", - "becca", - "aware", - "\u00ec\u0126\u013f", - "\u0120whatsoever", - "\u0120enables", - "\u0120Low", - "oid", - "\u00e0\u00b8\u012c", - "\u00c3\u00b3d", - "\u00d8\u00ba", - "\u0120constructed", - "\u0120Ladies", - "\u0120accused", - "\u0120\u00d0\u00b0\u00d0\u00bd", - "Dan", - "\u0120spawn", - "\u0120containers", - "\u0120artistic", - "\u00c4\u00b1p", - "\u0120discl", - "\u0120autres", - "inas", - "\u0120Nation", - "\u0120nag", - "bean", - "whe", - "\u013e\u00eb\u0131\u0126", - "\u0120Seoul", - "\u0120\u00ed\u0131\u00ac", - "\u0120Nich", - "\u0120complement", - "\u0120interven", - "\u0120Model", - "\u0120Orange", - "namon", - "\u0120calculation", - "see", - "\u0120ustedes", - "\u0120leb", - "\u0120doct", - "\u00d1\u0138\u00d0\u00bd", - "\u0120foster", - "\u0120elastic", - "\u0120Ahh", - "\u0120ace", - "\u0120Pink", - "\u0120Jeg", - "\u0120deer", - "\u00e3\u0123\u0139\u00e3\u0123\u0126", - "sis", - "\u0120jako", - "\u0120Emma", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b2\u00d0\u00b5\u00d0\u00bd\u00d0\u00bd\u00d0\u00be", - "\u0120portrait", - "\u0120maker", - "\u0120aument", - "\u00d1\u0122\u00d0\u00be\u00d0\u00b1", - "\u0120airplane", - "\u0120transparency", - "\u0120adjustment", - "\u0120CDC", - "\u00c3\u00a7on", - "\u0120uploaded", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00b9\u00d1\u0123\u00d1\u0124\u00d0\u00b2", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d0\u00b2", - "\u0120iter", - "\u0120curse", - "\u00c3\u00b4n", - "merce", - "aran", - "\u0120leak", - "\u00e7\u00b5\u0132", - "\u0120absence", - "\u00d1\u0123\u00d0\u00ba\u00d0\u00b8\u00d0\u00b9", - "\u0120readers", - "aler", - "\u0120beneath", - "ango", - "hetic", - "\u0120finns", - "\u0120poop", - "\u0120duplic", - "Hi", - "igs", - "ologically", - "opp", - "\u0120dizer", - "\u0120Allen", - "\u0120gli", - "\u0120acceleration", - 
"\u0120vitamin", - "\u00e3\u0125\u0143", - "v\u00c3\u00a4", - "\u0120Access", - "\u00e0\u00ae\u013b", - "r\u00c3\u00a1s", - "\u0120appreciated", - "\u0120nah", - "\u0120poster", - "\u0120tale", - "\u0120highlighted", - "\u00e6\u0138\u0129", - "\u00c5\u00bceli", - "\u0120blockchain", - "\u0120microw", - "\u0120cinema", - "\u0120Chang", - "\u0120Search", - "usters", - "\u0120Zero", - "\u0120Division", - "\u00d1\u0122\u00d0\u00b0\u00d1\u0123", - "\u0120scare", - "\u0120jelly", - "\u0120Administration", - "SO", - "\u0120lined", - "\u0120\u00ea\u00b0\u0126", - "\u0120geben", - "\u0120soda", - "\u0120winners", - "\u00b3\u00bc", - "\u00d9\u0134", - "\u0120Amb", - "\u00e5\u0137\u0131\u00e9\u00a1\u012e", - "\u00e5\u0136", - "\u0120peg", - "\u00e5\u00b7\u00b1", - "43", - "\u0120raus", - "\u0120rewards", - "\u0120inclus", - "\u0120highway", - "\u0120hah", - "\u0120multiplied", - "\u0120s\u00e1\u00ba\u00bd", - "\u0120disciples", - "\u0120ning", - "\u0120dressing", - "\u0120attributes", - "\u0120Mosc", - "\u0120Greece", - "\u0120sek", - "\u0120Learn", - "\u0120jus", - "rendre", - "\u0120personne", - "plete", - "\u0120placing", - "\u0120luego", - "illance", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u012b", - "\u0120provision", - "\u0120lion", - "tra", - "boards", - "\u0120behaviour", - "hey", - "\u0120subscription", - "\u0120protagon", - "\u00e3\u0125\u00a3", - "\u0120vara", - "\u0120\u00c5\u0141u", - "\u0120haha", - "\u0120teaspoon", - "\u00e6\u0141", - "avoir", - "\u0120crypto", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d1\u0122", - "\u0120Store", - "abs", - "\u0120Students", - "\u0120laund", - "into", - "\u0120approached", - "\u00b0\u013e", - "\u00d1\u0125\u00d1\u0130\u00d1\u012b", - "\u0120Labor", - "otes", - "iatric", - "\u0120gro\u00c3\u0141", - "utive", - "\u0120\u00d0\u00b8\u00d0\u00b4", - "\u0120Gib", - "\u0120placement", - "\u0120dif\u00c3\u0143cil", - "\u0120frog", - "\u0120\u00d0\u00b2\u00d1\u0123\u00d0\u00b5\u00d1\u0127", - "\u0120Jr", - "azed", - "\u00d1\u0125\u00d1\u012b", - "\u0120\u00ea\u00bc", - "frame", - "\u00d0\u00b0\u00d0\u00b5\u00d1\u012a\u00d1\u012e", - "\u0120lockdown", - "\u00e5\u0133\u00b3", - "\u0120medi", - "\u0120\u00d7\u0136\u00d7\u0140\u00d7", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8\u00d0\u00b9", - "emale", - "\u00ec\u00a2\u0127", - "ateral", - "\u0120distant", - "\u0120bears", - "\u0120journalist", - "\u00e8\u00a7\u00a3", - "\u0120Marshall", - "\u0120Ihnen", - "uetooth", - "bag", - "\u0120\u00c4\u0133\u00c3\u00a3", - "\u0120Highness", - "\u0120\u00ec\u00b0\u012f", - "\u00d0\u00b8\u00d0\u00ba\u00d0\u00b0", - "\u0120Wu", - "\u0120Fran", - "\u0120peng", - "\u0120fon", - "\u0120hypothesis", - "\u0120\u00d1\u0122\u00d1\u0125", - "\u0120ly", - "\u00d7\u013c", - "\u00ec\u013d\u0136", - "\u0120Radio", - "\u00e0\u00b8\u0140", - "Dav", - "\u0120embarrassing", - "\u0120\u00ec\u0140\u012a\u00ec\u0138\u00b4", - "\u0120casting", - "\u0120cage", - "\u0120Psych", - "\u0120\u00ec\u013f\u00bc\u00eb\u012d\u00a8", - "\u0120\u00c5\u00be", - "imb", - "\u0120directors", - "SH", - "\u0120\u00cf\u0126\u00ce\u00b7\u00ce\u00bd", - "\u00e1\u00bb\u0123u", - "\u0120konu\u00c5\u0141", - "\u0120optional", - "quarters", - "iker", - "\u0120Sant", - "\u0120verses", - "\u00eb\u00b6\u0122", - "\u0120olar", - "\u0120\u00cf\u0129", - "\u00e3\u0125\u0137", - "\u0120\u00ce\u00b3\u00ce\u00b9\u00ce\u00b1", - "\u0120Imm", - "\u0120controversial", - "\u0120ersten", - "\u0120recip", - "\u0120Christianity", - "\u0120\u00ea\u00b4\u013e", - "ordon", - "\u00d7\u0137\u00d7\u00a9", - "\u0120slash", - "\u0120Pf", - 
"\u00d1\u0125\u00d0\u00b4\u00d1\u012e", - "\u00d7\u0137\u00d7\u013f", - "\u0120Perry", - "\u0120mamy", - "\u0120backgrounds", - "\u0120\u00e0\u00ae\u0130\u00e0\u00ae\u00a9", - "\u0120pendant", - "\u0120Columbia", - "\u0120inverse", - "\u0120\u00d1\u0129\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d0\u00b7", - "\u0120sv", - "\u0120digging", - "41", - "chem", - "\u0120navigation", - "\u0120Shin", - "\u0120Front", - "PD", - "\u0120bearing", - "\u0120Wasser", - "\u0120wax", - "\u0120CHRIS", - "ching", - "\u0120pressed", - "El", - "\u0120Dal", - "onsin", - "\u0120binding", - "\u00d1\u0123\u00d0\u00ba\u00d0\u00be\u00d0\u00b9", - "poons", - "\u0120mock", - "arest", - "\u00d0\u00ba\u00d1\u0122\u00d0\u00b0", - "MM", - "\u0120corrupt", - "storm", - "\u0120refres", - "\u0120Coach", - "ll\u00c3\u00a4", - "\u0120THIS", - "\u0120parag", - "\u0120\u00ec\u0135\u00b0", - "pool", - "\u0120billions", - "\u0120\u00ea\u00b9\u0122", - "group", - "\u0120welcoming", - "cellence", - "\u0120Duke", - "\u00ea\u00b8\u00b4", - "\u0120primera", - "\u00ec\u0142\u00b8", - "\u0120pond", - "\u0120statue", - "\u0120\u00ea\u00b5\u00ac\u00eb", - "\u0120hatch", - "\u0120instrumental", - "\u0120residential", - "\u00ec\u00bb\u00a4", - "\u0120accepting", - "oshi", - "date", - "\u0120\u00ec\u0136\u00a8", - "\u0120planted", - "\u0120joking", - "\u0120\u00ec\u0126\u013e", - "\u0120hated", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d1\u0123\u00d0\u00ba", - "\u0120slept", - "\u0120packages", - "\u0120islands", - "esen", - "\u00c4\u0141\u00c4\u00b1", - "\u0120diagon", - "\u0120Osc", - "\u0120mesh", - "\u0120scales", - "arity", - "\u0120Defense", - "\u00e3\u0123\u00a1\u00e3\u0124\u0129", - "\u0120Lewis", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d0\u00b3\u00d0\u00be\u00d0\u00b4\u00d0\u00bd\u00d1\u0131", - "\u0120flies", - "uinely", - "\u0120Consider", - "\u0120stark", - "hew", - "\u0120As\u00c3\u0143", - "\u00b3\u00b4\u00eb", - "\u0120propose", - "\u0120\u00ed\u0137\u013a\u00eb\u00a9\u00b4", - "odo", - "\u0120Normally", - "\u0120heeft", - "\u0120Harris", - "gro", - "\u0120Blood", - "base", - "\u0120iOS", - "\u0120touches", - "\u0120inspir", - "\u0120\u00d7\u0135", - "\u0120binary", - "\u0120\u00ec\u00b6\u0136", - "\u0120serial", - "\u0120ion", - "\u0120unemployment", - "\u0120odds", - "\u0120Fab", - "\u0120FBI", - "BRUN", - "\u0120weights", - "\u00ce\u00bd\u00ce\u00bf", - "atile", - "\u0120nurses", - "\u0120involvement", - "\u0120\u00ed\u0136\u00bc", - "\u0120governance", - "\u0120\u00e2\u0124\u00ac", - "\u00d1\u0122\u00d1\u0125\u00d0\u00bf", - "ierra", - "\u00ed\u013a\u0137", - "\u0120Jerry", - "\u0120beard", - "\u0120salvation", - "\u0120Along", - "gentle", - "\u0120Ki", - "bol", - "\u0120Plat", - "\u0120hasht", - "\u00e8\u00bf\u0133", - "\u0120ware", - "\u0120partie", - "ycz", - "\u0120intr", - "Fih", - "nent", - "\u0120cheat", - "ilen", - "\u0120\u00eb\u00af", - "orie", - "\u0120f\u00c3\u00a1cil", - "etric", - "\u0120affecting", - "unciation", - "\u0120affairs", - "\u0120bee", - "\u0120viewing", - "\u0120orang", - "\u0120Lan", - "\u0120\u00d0\u00a1\u00d1\u0124", - "\u00e4\u00b8\u0138", - "\u0120Mes", - "\u0125\u0123", - "erie", - "\u0120espa", - "\u0120interpre", - "\u0120possess", - "\u0120purely", - "rito", - "found", - "asma", - "\u00ec\u0142\u0123\u00ec\u013f\u00b8", - "\u0120examine", - "\u0120\u00d1\u0125\u00d0\u00bc", - "\u0120besch", - "\u0120Tomorrow", - "\u0120Block", - "\u0120variant", - "\u0120preference", - "\u0120coaches", - "\u0120medications", - "\u0120\u00ed\u013a\u0126", - "\u0120empire", - "\u00eb\u0126\u00a4", - 
"\u0120Illinois", - "\u0120crispy", - "\u0120th\u00c3\u00ac", - "\u0120bees", - "77", - "\u0120glow", - "\u00e8\u00ba", - "\u0120Studies", - "\u00e5\u0132\u0126", - "\u0120Challenge", - "\u0120unlikely", - "\u00d0\u00a7", - "\u00c4\u00b1yorsun", - "DIE", - "\u0120minimize", - "izard", - "\u0120\u00c3\u00ban", - "\u0120encontrar", - "\u0120Kill", - "\u00e5\u00bb", - "\u0120vanilla", - "\u0120Grant", - "\u0120GT", - "sea", - "\u0120sought", - "\u00d0\u00b2\u00d0\u00be\u00d0\u00b4", - "\u0120n\u00c3\u00a4m", - "\u0120Aunt", - "OWN", - "\u0120pumpkin", - "stellen", - "\u0120rag", - "\u00d0\u00b5\u00d0\u00b3\u00d0\u00b4\u00d0\u00b0", - "\u0120storyt", - "\u0120forum", - "\u00e6\u00a9\u0141", - "\u0120estaba", - "uche", - "\u0120congress", - "\u0120Rey", - "\u0120dramatically", - "\u0120Sport", - "\u0120Yellow", - "\u0120\u00ea\u00b3\u0126\u00ec\u0128\u012f", - "\u0120disgusting", - "\u0120Recent", - "\u0120acquired", - "\u0120cables", - "\u00e7\u0136\u013c", - "din", - "\u0120visto", - "\u0120communicating", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d0\u00b2\u00d0\u00bb\u00d1\u0131", - "\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d0\u00be", - "\u00e3\u0125\u00bb\u00e3\u0125\u00bb\u00e3\u0125\u00bb", - "\u0120r\u00c3\u00a9g", - "\u0120socks", - "\u0120proces", - "because", - "\u0120utter", - "\u0120colocar", - "\u0120newest", - "\u0120gramm", - "\u00e8\u00a1\u00a8", - "\u00e4\u00b8\u012f\u00e7\u0141\u00a5\u00e9\u0123\u0135", - "\u0120shifting", - "\u0120carrier", - "\u0120\u00d1\u0123\u00d0\u00ba\u00d0\u00be\u00d1\u0122", - "\u0120Schw", - "\u0120executed", - "\u0120maintained", - "\u0120\u00cf\u0128", - "\u0120Moses", - "\u0120disse", - "\u0120horr", - "\u00e3\u0122\u013e", - "\u0120rally", - "\u0120allem", - "\u0120Eventually", - "\u0120diyor", - "lvania", - "\u0120schnell", - "\u0120\u00ea\u00b3\u00bc", - "\u0120\u00eb\u00a7\u00a4", - "\u0120struggles", - "late", - "\u0120clarify", - "\u00c3\u00a9ment", - "\u0120multiplic", - "\u00d0\u00b8\u00d0\u00b1\u00d0\u00be", - "\u0120journ", - "\u0120fragr", - "\u0120surprisingly", - "\u0120desperate", - "52", - "\u0120sul", - "\u0120Read", - "\u0120Fried", - "\u0120mond", - "woo", - "\u0120organizing", - "\u00e3\u0123\u0139\u00e3\u0124\u0129\u00e3\u0123\u0128", - "\u0120Soon", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u0123", - "\u0120Nur", - "\u0120\u00d0\u0139\u00d0\u00b4", - "\u0120spider", - "\u00d0\u00b5\u00d1\u0123\u00d1\u0131", - "\u0120tutorials", - "\u0120nutrients", - "orer", - "\u0120coefficient", - "\u0120arrangement", - "\u0120pricing", - "nan", - "yu", - "BL", - "\u0120tribe", - "\u0120Howard", - "unks", - "\u0120newer", - "\u0120provin", - "\u0120prediction", - "hos", - "\u0120olsun", - "\u0120Around", - "\u0120vier", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d0\u00bd", - "\u0120valley", - "\u0120Ela", - "ifi", - "\u0120galaxy", - "\u0120tranqu", - "\u0120advers", - "\u0120Temple", - "iffs", - "igence", - "\u00e8\u0129\u00aa\u00e5\u00b7\u00b1", - "\u0120k\u00c3\u00b6nnte", - "\u0120\u00c4\u0133\u00c3\u00b3", - "Did", - "\u0120photographs", - "\u0120AWS", - "\u00d1\u0128\u00d0\u00b8\u00d1\u0131", - "\u0120guards", - "\u0120appointed", - "\u0120Gil", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00bc", - "\u0120cod", - "\u0120Unlike", - "\u0120evenly", - "isconsin", - "\u0120estou", - "\u0120mnie", - "\u0120Exec", - "\u0120MV", - "\u0120Eine", - "\u00e4\u00bf\u00a1", - "\u0120Roger", - "\u0120Fac", - "\u0120List", - "\u0120fuer", - "\u00d0\u00b0\u00d0\u00b5\u00d1\u0124\u00d0\u00b5", 
- "omed", - "\u0120attraction", - "\u00e8\u012b\u00b2", - "\u0120terrain", - "\u0120Drop", - "\u0120corporations", - "\u0120sciences", - "\u0120throne", - "\u00e3\u0123\u0126\u00e3\u0123\u0141", - "\u0120aj", - "\u0120Rot", - "\u00e7\u012b\u00b9", - "\u0120supporters", - "\u0120Bere", - "Here", - "\u0120diferentes", - "\u0120significance", - "\u00cf\u0125\u00ce\u00b7", - "\u00e6\u012a\u0133\u00e8\u00a6\u00ba\u00e5\u00be\u0139", - "\u0120clamp", - "\u0120\u00eb\u012e\u0122\u00eb", - "\u0120fabulous", - "rez", - "\u00e6\u012e\u0123", - "\u0120assumptions", - "uther", - "wid", - "pot", - "\u00e8\u00bf\u0130", - "\u0120yan", - "ulin", - "\u00d1\u0122\u00d1\u012d\u00d0\u00b2", - "\u0120Slow", - "\u0120Pennsy", - "\u0120\u00ed\u0137\u00b4\u00ec\u0126\u013e", - "\u0120meio", - "\u0120wealthy", - "\u0120Eight", - "\u0120pulse", - "\u0120friction", - "idity", - "\u0120Holl", - "iyorum", - "\u0120sounded", - "\u0120Carr", - "\u0120fork", - "\u00e2\u013a", - "\u0120PA", - "\u0120conspir", - "\u0120coding", - "rt", - "\u0120Typ", - "\u0120\u00ec\u0138\u0133", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b3", - "\u0120miser", - "\u0120\u00d1\u0123\u00d0\u00bc\u00d0\u00be\u00d1\u0124\u00d1\u0122", - "\u0120Sweden", - "\u0120olarak", - "\u0120Zhang", - "\u0120Chi", - "\u0120Titan", - "\u0120screening", - "\u0120Spider", - "\u0120\u00c5\u0140imdi", - "\u0120obstacles", - "lara", - "\u0120challenged", - "pse", - "TON", - "\u00e1\u00bb\u00a5", - "\u0120Pi", - "\u0120lagi", - "ieurs", - "\u0120hurting", - "\u0120neglect", - "\u0120generating", - "\u0120youngest", - "\u0120audit", - "\u0120\u00d1\u0122\u00d0\u00b5\u00d0\u00b7", - "\u00cf\u0123\u00ce\u00ac", - "\u0120donate", - "\u0120PDF", - "\u0120visits", - "\u0120cruise", - "PP", - "aser", - "\u0120wsp", - "backs", - "ivals", - "\u00e3\u0123\u0128\u00e3\u0124\u0135", - "\u0120deve", - "\u0120proport", - "\u0120cath", - "\u0120Effect", - "\u0120winds", - "\u0120\u00ec\u013b\u0136", - "\u0120charts", - "\u0120sama", - "\u0120automation", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00ba\u00d0\u00b0", - "\u0120olan", - "\u0120boats", - "\u0120cafe", - "\u0120denied", - "\u0120Mama", - "\u0120blocking", - "\u0120Thor", - "\u0120phenomenal", - "\u0120stakeholders", - "\u0120unos", - "\u00d1\u0125\u00d0\u00b5\u00d1\u0124", - "\u0120Abraham", - "\u00e3\u0123\u00a7\u00e3\u0124\u0124", - "\u0120detection", - "\u0120juris", - "\u0120powered", - "zial", - "\u0120welfare", - "\u0120upgrad", - "\u0120mo\u00c5\u00bcna", - "\u0120Case", - "cular", - "\u0136\u00ec\u013f\u00b4", - "\u00e3\u0125\u0123", - "\u0120Guess", - "\u0120cycles", - "\u00e4\u00be\u012d", - "\u00e7\u00b5\u00a6", - "rock", - "umi", - "\u0120elite", - "\u0120qu\u00c3\u00a8", - "\u00e5\u0142\u00b1", - "\u00d1\u0124\u00d0\u00be\u00d0\u00bc", - "\u0120shore", - "gunta", - "\u0120ku", - "\u0120faithful", - "\u0120Jeremy", - "aid", - "\u00e0\u00b7", - "ugal", - "\u00e5\u00b0\u012f\u00e5\u0137\u012c", - "\u0120Vel", - "\u0120vrai", - "stell", - "\u00a8\u00b8", - "\u0120kol", - "\u00e8\u00bd", - "\u0120quanto", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d1\u0122", - "\u01202002", - "esy", - "\u0120reserve", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd\u00d1\u0124", - "\u0120deployed", - "\u0120defining", - "\u0120sau", - "\u0120gaat", - "\")", - "\u0120transmit", - "\u0120publishing", - "\u0120ranking", - "\u0120offense", - "\u012046", - "pin", - "\u0120Taking", - "\u0120entitled", - "\u0120genuinely", - "\u0120variations", - "\u0120finde", - "\u0120tau", - "\u0120unfortunate", - "\u0120Rah", - 
"ports", - "\u0120c\u00c5", - "\u0120monkey", - "\u0120brac", - "wei", - "lung", - "\u0120artif", - "\u0120syrup", - "\u0120\u00d0\u0136\u00d0\u00b0\u00d0\u00b2", - "\u0120lifted", - "\u0120chez", - "\u0120Advent", - "\u0120Stock", - "\u0120dol", - "\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd", - "\u00d0\u00b8\u00d1\u012a\u00d1\u012e", - "\u0120yn", - "gio", - "det", - "\u0120desse", - "\u0120gri", - "\u0120Chairman", - "\u00e7\u0127", - "\u0120cuenta", - "anim", - "\u0120crab", - "\u0120escal", - "\u0120premi\u00c3\u00a8re", - "\u0120Gef", - "\u0120dining", - "\u0120seventh", - "\u0120chasing", - "\u0120Tower", - "\u0120brutal", - "\u0120fundamentally", - "\u00e3\u0123\u00a8\u00e3\u0123\u0128", - "\u00d0\u00bb\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8\u00d1\u0131", - "stage", - "\u0120acquis", - "\u0120cylinder", - "\u0120commander", - "mem", - "\u0120UV", - "happy", - "\u0120epsilon", - "\u0120invitation", - "\u0120farmer", - "chair", - "\u0120destiny", - "\u0120sovere", - "\u0120Hebrew", - "\u0120servant", - "\u0120bew", - "\u0120gast", - "uties", - "\u0120administrative", - "\u0120Command", - "\u00c3\u00a9ta", - "\u0120nitrogen", - "\u00ea\u00b7\u00bc", - "\u0120abi", - "\u0120villain", - "\u0120blanket", - "\u0120Send", - "\u0120beaten", - "\u00b2\u0126", - "\u0120volunt", - "\u0120scholar", - "\u0120Emperor", - "\u012043", - "vable", - "\u0120Dus", - "\u0120GU", - "\u0120targeting", - "www", - "\u0120amendment", - "\u00ec\u0128\u012e\u00eb", - "\u0120ting", - "\u0120nasty", - "\u0120gauge", - "\u0120\u00d1\u0122\u00d0\u00be\u00d0\u00b4", - "\u0120Hans", - "Your", - "\u00ce\u00b1\u00ce\u00bd", - "\u0120projet", - "\u0120Hawaii", - "\u0120suspicious", - "\u0120schw", - "\u0120removal", - "\u0120intrig", - "\u0120MU", - "\u0120ponto", - "\u00e0\u00a4\u00be", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u0122\u00d0\u00b0\u00d0\u00b7", - "\u0120guessing", - "pace", - "\u0120mothers", - "\u0120millimeter", - "\u00d0\u00bb\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8\u00d0\u00b5", - "\u00e6\u00b2\u00a1\u00e6\u013e\u012b", - "\u0120availability", - "icz", - "\u00e6\u0143\u00a4", - "\u0120fract", - "\u0120bases", - "km", - "\u0120BTS", - "\u0120Field", - "\u0120dzie", - "\u0120segundo", - "\u0120\u00eb\u0124\u013a\u00eb\u012c\u0136", - "\u0120legitimate", - "imas", - "\u0120\u00d0\u00b2\u00d0\u00bd", - "\u0120corruption", - "\u0120smash", - "\u0120Valent", - "\u0120aligned", - "\u0120Pennsylvania", - "\u0120gab", - "\u0120Eun", - "enth", - "\u0120Morning", - "\u0120candle", - "\u0120backpack", - "\u0120Islamic", - "a\u00c3\u00a7\u00c3\u00b5es", - "\u0120encry", - "\u0120mushrooms", - "\u00ed\u012e\u012e", - "dit", - "\u0120transit", - "\u0120Wisconsin", - "\u0120participated", - "\u0120Ils", - "\u0120unfold", - "\u00b6\u0122\u00eb", - "\u0120profits", - "\u0120warming", - "\u0120Gang", - "\u0120networking", - "\u0120mega", - "\u0120thoroughly", - "lements", - "\u0120Hm", - "\u0120deciding", - "\u0120emotionally", - "\u0120exhausted", - "\u0120\u00d0\u0141\u00d0\u00be\u00d1\u0124", - "cido", - "\u0120HTML", - "\u0120copyright", - "\u0120melody", - "yim", - "\u0120anders", - "oshop", - "\u0120\u00eb\u00b3\u00bc", - "\u0120athlete", - "\u0120GE", - "\u0120frequent", - "\u0120desires", - "\u0120needing", - "\u0120Yun", - "\u0120rifle", - "\u0120lover", - "'T", - "\u0120dense", - "\u0120t\u00c3\u00a3o", - "\u0120notified", - "\u0120idi", - "\u00ec\u0139\u0143", - "\u00ed\u0128", - "\u0120interacting", - "\u0120rapport", - "\u00d0\u00b5\u00d1\u0122\u00d0\u00b8", - "ski", - "\u0120besser", - "\u0120manufacturer", - 
"\u0120Kyle", - "\u0120accountable", - "\u0120Sak", - "\u0120Pil", - "\u0120Domin", - "\u0120presum", - "\u0120\u00d0\u0134\u00d1\u0123\u00d0\u00b5", - "\u0120vinegar", - "\u0120guaranteed", - "\u00e7\u013e\u012d\u00e5\u012a\u00b0", - "\u0120handled", - "\u00e9\u0141\u00b3", - "cat", - "\u0120civilization", - "\u0120accomp", - "\u0120VM", - "\u00c3\u00a9mon", - "\u0120deze", - "\u0120grades", - "\u0120sollte", - "\u0120staring", - "\u00d7\u0132\u00d7\u00aa", - "arnt", - "\u0120horizon", - "\u0120travail", - "hour", - "\u00e7\u00ac\u00ac\u00e4\u00b8\u0122", - "\u0120ED", - "\u0120Dak", - "\u0120ny", - "\u0120conve", - "\u0120Cham", - "\u0120firms", - "\u0120Liu", - "\u0120\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00b0\u00d0\u00bd", - "\u0120libert", - "\u0120lenses", - "\u0120intake", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d0\u00b1", - "\u0120mensen", - "hel", - "\u0120practition", - "\u0120350", - "\u00e3\u0124\u00b3", - "FO", - "\u0120beds", - "\u0120ancestors", - "\u0120\u00ec\u0139\u0126\u00ec\u00b2\u0143", - "\u0120disturb", - "\u0120Lastly", - "\u0120Support", - "\u00e0\u00b8\u00b5\u00e0\u00b9\u012b", - "\u0120Corona", - "\u0120enthusi", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00b7\u00d0\u00bc", - "\u0120\u00ec\u0124\u00ac\u00eb\u0140\u012e\u00eb", - "\u012052", - "bird", - "\u0120reduces", - "\u0120\u00ec\u0140\u012a\u00ec\u013f\u0126", - "\u0120Gene", - "\u00ea\u00b5\u0132", - "\u00c4\u013bp", - "\u0120\u00c3\u013eber", - "\u0120concerning", - "user", - "\u0120concentrate", - "\u0120WHAT", - "ishop", - "onymous", - "nold", - "\u0120suggesting", - "\u00a9\u00b0", - "\u0120Fish", - "........", - "\u0120vessel", - "\u0120trabajo", - "\u00e3\u0123\u00b5", - "\u0120Ocean", - "\u00e5\u00a7\u0132", - "yg", - "\u0120towns", - "del", - "\u0120terrifying", - "\u0120\u00c3\u00a7al\u00c4\u00b1\u00c5\u0141", - "\u0120sino", - "\u0120eats", - "\u0120gez", - "\u0120geme", - "\u0120\u00ec\u013b\u0126", - "\u0120compart", - "\u0120implementing", - "\u0120Potter", - "\u0120Germans", - "\u0120g\u00c5\u0124", - "\u0120tennis", - "\u0120carpet", - "auer", - "\u0120Saudi", - "yeong", - "\u0120curry", - "\u0120Forest", - "\u00d1\u012d\u00d0\u00bb", - "\u0120fifteen", - "\u0120bolts", - "\u0120{\\", - "\u00ac\u00b4", - "\u0120settlement", - "\u0120lange", - "\u0120bam", - "Get", - "\u00ed\u0137\u013b", - "\u0120swap", - "\u0120Khan", - "\u0120commence", - "\u0120quarantine", - "\u0120scored", - "\u00e7\u0138", - "\u01201950", - "\u0120thicker", - "\u0120s\u00c3\u00bbr", - "\u00e5\u0131\u00a3", - "\u0120Larry", - "\u0120allez", - "\u00ec\u012d\u013e\u00eb\u012c\u0136", - "\u0120g\u00c3\u00bc", - "\u0120spectacular", - "//", - "both", - "\u0120stats", - "\u00e5\u00a6\u00b3", - "\u0120Nancy", - "\u0120bunu", - "\u0120crust", - "\u0120activated", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0140", - "outhe", - "\u0120ports", - "\u0120neural", - "\u0120jaw", - "\u0120observations", - "\u0120voit", - "aban", - "\u00e1\u00ba\u00a3i", - "\u00a6\u00ac\u00eb\u00a5\u00bc", - "omes", - "\u00e0\u00af\u012d", - "qui", - "\u0120kindness", - "\u00d0\u0133", - "\u012041", - "\u0120moderate", - "\u0120angels", - "\u0120Tamb", - "\u00c3\u00a8t", - "\u0120chlor", - "\u0120Billy", - "\u00ec\u00b2\u013a\u00eb", - "acon", - "\u0120selecting", - "\u0120Delta", - "\u0120null", - "denly", - "\u0120ciud", - "\u0120tendency", - "\u0120breakdown", - "\u0120mint", - "\u00d1\u0126\u00d0\u00be\u00d1\u0122\u00d0\u00bc", - "orph", - "\u0120dawn", - "spr", - "\u0120WILL", - "\u00c3\u00a4chlich", - "\u0120puppy", - "700", - 
"\u0120\u00e0\u00ae\u00a4", - "\u0120fails", - "\u0120Conc", - "\u0120relatives", - "\u0120inviting", - "\u0120autonom", - "\u0120composed", - "\u0120unity", - "\u0120decis", - "\u0120accessories", - "\u0120Cass", - "\u0120bist", - "\u0120Tip", - "\u00ec\u00a7\u00b8", - "\u0120punt", - "\u0120r\u00c3\u00a1p", - "\u00e9\u0122\u00b2", - "ANK", - "\u00e3\u0123\u013c", - "exist", - "\u0120compatible", - "\u0120ner", - "\u0120\u00d0\u00b5\u00d0\u00bc\u00d1\u0125", - "\u0120aplic", - "\u0120bapt", - "\u0120failing", - "\u0120Tamam", - "\u0120oscill", - "\u0120letzten", - "\u0120repeatedly", - "\u0120jungle", - "\u0120Push", - "hai", - "\u0120\u00ce\u00b7", - "\u0120deadly", - "\u00d1\u0131\u00d0\u00b6", - "wi\u00c4\u0127", - "\u0120Common", - "\u0120\u00ce\u0137", - "\u0120skate", - "TC", - "\u0120Mini", - "\u0120hobby", - "\u00e1\u00ba\u00a7n", - "\u0120routes", - "\u0120amigos", - "\u0120conjun", - "\u0120partnerships", - "\u0120novo", - "\u0120aver", - "\u0120pouvez", - "bridge", - "\u0120preoc", - "him", - "\u0120turb", - "\u0120sob", - "\u0120Snap", - "\u0120\u00ec\u00b0\u00b8", - "minute", - "\u0120traject", - "uj\u00c4\u013b", - "\u0120eager", - "\u0120regulatory", - "\u0120banking", - "bling", - "\u00d1\u012a\u00d1\u012e", - "a\u00c5\u00bc", - "\u0120bizarre", - "itated", - "dire", - "\u0120threatened", - "\u0120shining", - "\u0120nesse", - "\u0120corps", - "\u0120\u00d1\u0123\u00d1\u0125", - "\u0120teles", - "\u0120temp", - "tem", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00bd", - "\u0120fever", - "New", - "\u0120heavier", - "\u0120Sah", - "bud", - "\u0120outros", - "\u0120\u00ec\u00b0\u00be", - "\u0120\u00eb\u00aa\u0127", - "arring", - "\u0120\u00ea\u00b4\u013e\u00ec\u00b0\u00ae", - "\u0120Nap", - "\u0120semin", - "\u0120Than", - "ifs", - "\u0120desen", - "\u0120\u00d1\u0124\u00d0\u00b0\u00d0\u00ba\u00d0\u00be\u00d0\u00b5", - "\u0120loses", - "\u0120Balt", - "kon", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00bf\u00d1\u0122", - "\u0120vois", - "\u0120Moscow", - "\u0120chairs", - "his", - "\u0120refugees", - "kg", - "\u0120kole", - "\u012f\u00a8", - "\u00d0\u00b0\u00d1\u0123\u00d0\u00b8\u00d0\u00b1\u00d0\u00be", - "\u00a6\u00bd", - "\u0120Universe", - "\u0120Direct", - "\u0120cheating", - "\u0120Cin", - "\u0120patri", - "\u0120advise", - "\u0120Nether", - "\u0120primeiro", - "\u0120mentioning", - "nut", - "56", - "ar\u00c4\u00b1", - "\u0120petite", - "bled", - "\u0120pensar", - "icio", - "IND", - "\u0120veteran", - "\u0120ladder", - "\u0120consequence", - "\u00d0\u00be\u00d0\u00b6\u00d0\u00b0\u00d0\u00bb", - "\u0120Burn", - "\u0120rug", - "\u0120Made", - "\u0120git", - "\"...", - "\u0120competitors", - "\u0120przed", - "\u0120apparent", - "\u0120Argentina", - "\u0120Working", - "\u0120collaborate", - "woman", - "\u0120retain", - "\u0120leurs", - "\u0120dashboard", - "\u00d7\u013b\u00d7\u0135", - "\u0120Early", - "BM", - "\u0120\u00d0\u00b5\u00d1\u0133", - "\u00d0\u00be\u00d0\u00bb\u00d0\u00be\u00d0\u00b3", - "\u0120satisfying", - "\u0120oftentimes", - "\u0120mapping", - "\u00c3\u00bcnk\u00c3\u00bc", - "arth", - "fold", - "\u0120launching", - "\u0120aura", - "\u0120precision", - "works", - "God", - "\u0120strap", - "\u0120Imper", - "\u0120rivers", - "\u0120|", - "\u0120cuer", - "regon", - "\u0120arrival", - "\u00d0\u00ba\u00d0\u00b0\u00d1\u0127", - "\u0120Miami", - "\u00d0\u00b0\u00d0\u00bd\u00d1\u012d", - "\u0120survivors", - "\u0120Senior", - "David", - "\u0120estado", - "\u0120sectors", - "\u0120popping", - "\u0120chim", - "ay\u00c4\u00b1", - "\u0120kunnen", - "\u0120gallery", - 
"\u0120sunlight", - "esehen", - "\u0120yelling", - "\u0120Mein", - "\u0120Phoenix", - "\u0120mano", - "\u0120historia", - "\u0120occurring", - "\u00e6\u00ac\u00b8", - "\u00ec\u00b8", - "\u00d0\u00b0\u00d0\u00b4\u00d0\u00b8", - "\u00e5\u00be\u0127", - "\u0120institutional", - "\u0120Tut", - "\u00e7\u00b2", - "\u0120slaves", - "\u00e3\u0123\u00a9\u00e3\u0123\u0128", - "\u0120forgiveness", - "\u0120twin", - "\u0120Hyun", - "\u00d0\u00bd\u00d1\u012e", - "\u0120Komm", - "andra", - "shot", - "ss\u00c3\u00a4", - "\u0120\u00d1\u0128\u00d0\u00b5", - "atta", - "\u0120expense", - "\u0120GPU", - "\u0120Past", - "ribly", - "\u0120\u00eb\u0143\u0132\u00ec\u0137\u00bc", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d0\u00b4\u00d0\u00b0", - "\u0120respir", - "\u00e6\u013f\u00b1", - "\u0120Queens", - "hops", - "\u0120s\u00c3\u00a9rie", - "\u0120pref", - "\u0120comed", - "\u0120plut", - "\u0120Overall", - "\u0120\u00e3\u0123\u013f", - "\u0120cush", - "\u0120ringing", - "\u0120incorrect", - "\u0120\u00d1\u0123\u00d1\u0124\u00d1\u0122", - "\u0120geometry", - "\u0120advertis", - "\u0120\u00d0\u00a8", - "\u0120reviewed", - "\u00e3\u0123\u0124\u00e3\u0123\u0124", - "\u0120dozens", - "\u0120determination", - "\u0120Phill", - "\u0120contributed", - "\u0120Cit", - "\u0120passengers", - "\u0120c\u00c3\u00b4t\u00c3\u00a9", - "\u0120rever", - "\u0120technological", - "\u0120allen", - "\u0120raining", - "avi", - "\u0120salty", - "\u0120typing", - "\u0120\u00d1\u0124\u00d0\u00b5", - "\u0120tilt", - "\u0120\u00ec\u00b9\u013a", - "\u0120\u00d0\u00be\u00d1\u0122", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d1\u0131\u00d0\u00bc", - "\u0120rou", - "\u0120arena", - "arat", - "\u00e5\u012a\u00ab", - "HHHH", - "\u0120manufacturers", - "\u0120Edward", - "\u0120tuck", - "\u0120blows", - "ingo", - "\u0120Marc", - "\u00ec\u0137\u0126\u00ec\u0126\u013e", - "Mich", - "\u0120Clean", - "\u00e8\u00b4", - "esto", - "\u0120Pack", - "\u0120shaft", - "BRUNO", - "\u0120aven", - "uur", - "\u00d1\u0123\u00d0\u00ba\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00ba\u00d0\u00be", - "\u00ea\u00b4\u0122", - "\u0120automated", - "\u0120venture", - "\u0120surveillance", - "\u0120Grow", - "\u0120Emer", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d1\u0122", - "\u0120investor", - "\u0120Yok", - "\u0120latter", - "\u0120NI", - "\u0120functioning", - "\u0120Hamilton", - "\u012051", - "\u0120murdered", - "\u0120anchor", - "\u0120cuc", - "\u0120SCP", - "\u0120Madam", - "\u0120constraints", - "\u0120barn", - "anken", - "\u0120\u00eb\u00a7\u0130\u00ec\u013f\u0122", - "\u0120Motor", - "\u0120Doing", - "\u0120amen", - "etts", - "\u0120instructor", - "egt", - "ako", - "\u0120posture", - "ivia", - "\u0120Polish", - "\u0120\u00d0\u00b4\u00d0\u00b2\u00d0\u00b0", - "\u0120colorful", - "\u0120elbow", - "\u0120parle", - "\u0120passer", - "\u0120condem", - "ortal", - "\u0120fertil", - "\u00d8\u00a7\u00d8\u00af", - "\u0120Colomb", - "\u0120alignment", - "\u0120astronaut", - "\u0120Mut", - "\u0120salmon", - "\u0120structured", - "\u0140\u00d7\u00a8", - "\u0120clicks", - "\u0120miej", - "\u00e6\u0136\u00bf", - "\u00e3\u0123\u0126\u00e3\u0124\u0126", - "\u0120Round", - "\u0120rainbow", - "\u0120VA", - "\u00e3\u0123\u0136\u00e3\u0123\u0138", - "\u00ec\u00a7\u012a", - "otz", - ",", - "\u0120chords", - "\u0120Sanders", - "\u0120\u00eb\u00b6\u0126\u00eb", - "Ben", - "\u0120dar\u00c3\u00bcber", - "ilians", - "\u0120ordering", - "\u0120Manh", - "\u0120kilogram", - "\u0120kar\u00c5\u0141", - "\u0120grasp", - "\u0120ghosts", - "alen", - "\u0120Jedi", - "\u0120\u00d0\u00b1\u00d0\u00bb\u00d0\u00b8", - 
"\u0120downloaded", - "\u0120conducting", - "\u0120Hak", - "\u0120researcher", - "ilan", - "good", - "\u0120Hannah", - "\u0120d\u00c3\u00bc\u00c5\u0141\u00c3\u00bcn", - "\u0120Messiah", - "uity", - "iona", - "\u0120probable", - "\u0120YE", - "\u0120independently", - "\u0120buffer", - "burn", - "ourd", - "\u0120McK", - "\u0120lingu", - "ujemy", - "\u00d0\u00b5\u00d1\u0122\u00d1\u0124", - "\u0120intuitive", - "\u0120cracks", - "appropri", - "nty", - "\u0120geen", - "\u0120lend", - "\u0120certification", - "IDS", - "unter", - "pees", - "\u0120trump", - "\u0120bankrupt", - "\u0120feas", - "\u00e8\u0139", - "\u0120du\u00c5\u00bc", - "\u00e6\u00b8\u0127", - "\u0120viruses", - "\u012058", - "god", - "\u0120\u00d0\u00b6\u00d0\u00b5\u00d0\u00bb", - "\u0120stalk", - "Ind", - "achi", - "\u0120CF", - "\u0120Cond", - "\u0120sanct", - "\u0120conten", - "\u0120freed", - "\u0120RT", - "\u0120mentors", - "\u00ec\u00a1\u00b1", - "\u0120portable", - "\u0120Paulo", - "rane", - "HAHA", - "\u0120Section", - "\u00e7\u0128", - "hyun", - "\u0120\u00ce\u0143\u00cf\u0129", - "\u0120Pub", - "\u0120Independ", - "\u0120compounds", - "\u0120\u00d1\u0123\u00d1\u012d", - "\u0120messaging", - "\u0120dedication", - "\u0120noticing", - "\u0120devoted", - "\u00d1\u0130\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "\u0120snakes", - "\u0120battlefield", - "pers", - "\u0120dela", - "92", - "\u0120hai", - "ill\u00c3\u00a4", - "\u00c3\u00a9rer", - "every", - "\u0120responsive", - "\u00d7\u013b\u00d7\u0137", - "opf", - "\u00e9\u012b", - "\u012c\u00b8", - "Because", - "\u0120tourism", - "\u0120\u00ea\u00b7\u00b8\u00ea\u00b2\u012e", - "\u00d7\u0137\u00d7\u00a6", - "\u0120cans", - "st\u00c3\u00bct", - "\u0120donne", - "\u0120Dios", - "\u0120Uber", - "actory", - "\u0120oriented", - "\u0120Herm", - "\u0120patron", - "urf", - "bei", - "\u0120programa", - "\u0120Ohh", - "gener", - "\u0120fist", - "\u0120Wendy", - "\u0120anda", - "\u0120guessed", - "\u0120freak", - "\u00e4\u00b8\u0143\u00e5\u013e\u012d", - "\u0120Kings", - "chool", - "\u0120offline", - "\u0120Indiana", - "\u0120Alliance", - "\u012053", - "\u0120particul", - "\u0120Focus", - "\u0120inhabit", - "\u0120\u00ea\u00b0\u013b\u00ec\u013f\u0122\u00eb\u012f\u00b0", - "\u0120McG", - "owski", - "\u0120\u00ec\u013f\u00b4\u00ea\u00b1\u00b4", - "\u0120pa\u00c5\u0126st", - "\u00d0\u00be\u00d0\u00bd\u00d0\u00b8", - "itta", - "\u0120confirmation", - "\u0120Brooklyn", - "\u0120noodle", - "fund", - "itud", - "\u0120grandparents", - "\u0120barbecue", - "\u00ce\u00b5\u00ce\u00b9\u00cf\u0124", - "\u0120\u00e1", - "\u0120ballot", - "\u0120Veter", - "\u0120pipes", - "igious", - "\u0120Graph", - "ested", - "\u0120\u00eb\u00b8\u012e\u00eb", - "\u0120KE", - "\u00e3\u0123\u00a1\u00e3\u0124\u0129\u00e3\u0123\u00a3\u00e3\u0123\u00a8", - "\u0120eins", - "\u0120hatred", - "\u00e3\u0123\u0133\u00e3\u0123\u00a9", - "\u0120dang", - "eeee", - "\u0120archae", - "\u0120Jesse", - "\u0120detected", - "\u0120seni", - "burgh", - "\u0120displacement", - "\u0120dop", - "\u0120conditioning", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d1\u0123\u00d0\u00ba\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00ba\u00d0\u00be", - "\u0120disturbing", - "PH", - "\u0120thinner", - "\u0120wounded", - "\u0120Cuando", - "\u0120cushion", - "\u0120whites", - "\u0120preferences", - "\u0120\u00ec\u00a4\u0122\u00eb\u00b9\u0126", - "\u0120ka\u00c5\u00bc", - "\u0120Gate", - "\u0120Path", - "dles", - "\u00e0\u00b8\u0126\u00e0\u00b8\u00a3", - "imore", - "\u0120\u00eb\u00b3\u00b4\u00ec\u0139\u00ac", - "\u0120disciplines", - "\u00e1\u00bb\u0131", - 
"\u0120mesma", - "\u0120\u00ec\u0125\u012a\u00eb", - "\u0120\u00ec\u012d\u00ac", - "\u0120ging", - "\u0120umbrella", - "IGHT", - "\u0120pension", - "\u0120combining", - "SS", - "\u0120rectangle", - "\u00e1\u00bb\u0129t", - "\u0120proxim", - "\u0120Cow", - "\u00b8\u012e", - "\u0120intentional", - "\u00e6\u0137\u013b", - "\u0120decid", - "\u0120\u00d1\u0123\u00d0\u00ba\u00d0\u00b0\u00d0\u00b6", - "\u0120Uma", - "iasm", - "buz", - "\u0120debris", - "\u0120cass", - "\u0120Prop", - "iska", - "\u00eb\u0142\u00a5", - "esterol", - "ussian", - "\u00ec\u013f\u00b4\u00eb\u0140\u0133", - "\u0120unlimited", - "\u0120admire", - "\u0120tightly", - "\u0120genome", - "\u0120Junior", - "venir", - "gus", - "\u0120c\u00c4\u0125", - "\u0120Vlad", - "\u0120\u00ed\u0124", - "\u0120relativ", - "inci", - "\u0120aunque", - "\u0120Boys", - "\u00d1\u0128\u00d0\u00b8\u00d0\u00be\u00d0\u00bd", - "\u0120Swiss", - "\u0120physicians", - "\u0120\u00ed\u0131\u012b", - "\u0120PET", - "\u0120wounds", - "about", - "\u00c3\u0142i", - "onz", - "urities", - "\u0120\u00d1\u0125\u00d0\u00b2\u00d0\u00b8\u00d0\u00b4", - "\u00e5\u00b7\u00a6", - "\u0120mentality", - "\u0120variance", - "\u0120segunda", - "\u0120volcano", - "alie", - "\u00e0\u00a5\u0129", - "\u0120tiles", - "\u0120Terry", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d9\u0126\u00d9\u0129", - "\u0120canon", - "\u0120scattered", - "pton", - "\u0120definitions", - "\u0120algebra", - "oten", - "ablo", - "ijuana", - "\u0120wrapping", - "\u0120sesame", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0129\u00d0\u00b8\u00d0\u00bd\u00d0\u00b0", - "\u0120Alf", - "\u0120\u00d0\u0142\u00d0\u00be\u00d1\u0123\u00d1\u0123", - "orno", - "\u0120ankle", - "\u0120specialty", - "\u0120attempting", - "iliation", - "\u01201920", - "\u0120phenomena", - "\u0120Product", - "\u0120Buck", - "\u0120Aww", - "seen", - "\u0120void", - "\u0120Franklin", - "\u0120advocacy", - "\u0120Sep", - "\u0120coolest", - "\u0120\u00d1\u0123\u00d1\u0122\u00d0\u00b0\u00d0\u00b7\u00d1\u0125", - "\u0120Quand", - "\u0120900", - "\u0120Trad", - "dies", - "\u0120hash", - "\u00e6\u012a\u0133\u00e5\u00b0\u00b1", - "\u00e4\u00b9\u0141\u00e6\u013a\u00af", - "\u0120pots", - "\u0120sadly", - "\u0120viable", - "\u0120Tiger", - "\u0120ONE", - "\u0120neurons", - "owanie", - "\u00c4\u0139", - "\u0120Shar", - "\u0120Landes", - "\u0120conferences", - "\u00e8\u00a9\u00b2", - "\u0120credential", - "\u0120lime", - "inee", - "xit", - "pay", - "\u0120incons", - "\u0120>>:", - "\u00e8\u00aa\u012f", - "\u0120\u00ed\u0140\u013a\u00eb", - "\u0120lesser", - "\u0120spill", - "\u0120premise", - "\u0120365", - "\u0120Host", - "\u0120tomar", - "\u00d7\u0132\u00d7\u013e", - "\u00eb\u00b2\u012a", - "\u0120Whats", - "\u0120lightweight", - "\u0120Map", - "fia", - "ellschaft", - "\u0120vendors", - "uesto", - "\u0120Mister", - "\u0120\u00d0\u0141\u00d1\u0122\u00d0\u00b8", - "\u00e5\u0131\u00b3", - "hma", - "\u0120intentionally", - "\u0120Tang", - "\u00e9\u0139\u00ae", - "\u0120identification", - "\u0120etcetera", - "\u0120Nee", - "\u0120\u00d1\u0124\u00d1\u0122\u00d0\u00b8", - "\u00ea\u00b7\u00b8", - "\u0120cryptocur", - "\u0120inhale", - "\u0120addict", - "\u00e5\u0132\u0126\u00e4\u00bd\u012f", - "\u0120mau", - "\u0120\u00d1\u0124\u00d0\u00b0\u00d0\u00ba\u00d0\u00b0\u00d1\u0131", - "\u0120\u00eb\u00b2\u0126", - "\u0120comprar", - "iedzie\u00c4\u0129", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00bd\u00d0\u00be", - "\u0120beginner", - "\u0120\u00d0\u00bc\u00d1\u0125\u00d0\u00b6", - "\u0120obsc", - "\u0120limiting", - "ascular", - "\u0120inspection", - "aci", - 
"\u0120rejo", - "Mus", - "\u0120zaten", - "\u0120szcz", - "\u0120Madrid", - "\u0120varieties", - "\u0120est\u00c3\u0142", - "\u0120Shakes", - "\u0120kits", - "\u0120administer", - "\u0120lava", - "\u0120g\u00c3\u00a5", - "\u00e8\u00a9\u00a6", - "\u00d7\u00aa\u00d7\u013b", - "\u0120Wayne", - "\u0120instagram", - "\u0120rated", - "paper", - "\u0120bild", - "\u0120pretending", - "\u0120observing", - "\u0120\u00d1\u0123\u00d0\u00b0\u00d0\u00bc\u00d0\u00be\u00d0\u00bc", - "\u0120tror", - "\u0120organisms", - "\u0120falta", - "\u0120hometown", - "\u00e7\u00b1", - "\u0120\u00ed\u012d", - "\u0120cheg", - "\u0120\u00ec\u00a1", - "\u0120comma", - "is\u00c3\u00a9", - "\u0120likelihood", - "avored", - "\u0120geldi", - "\u00d0\u00bd\u00d0\u00b8\u00d0\u00ba\u00d0\u00be\u00d0\u00b2", - "\u0120medio", - "\u0120jakie", - "\u0120Jup", - "\u0120greenhouse", - "\u0120spit", - "\u00d0\u00ba\u00d0\u00be\u00d0\u00b5", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00b6", - "\u0120Gram", - "\u0120Conference", - "\u0120deficit", - "s\u00c4\u00b1n", - "inse", - "u\u00c4\u0141", - "\u0120richt", - "\u0120coincidence", - "\u00e5\u0131\u012f", - "\u0120europ", - "\u0120butterfly", - "pread", - "\u0120\u00ec\u0138\u00bc", - "\u00e8\u0122\u00b6", - "\u0120wavel", - "\u0120Infin", - "\u0120Planet", - "\u0120selfie", - "ientras", - "\u0120arrog", - "oser", - "idal", - "\u0142\u00d7\u0139\u00d7\u0142\u00d7\u0137", - "\u00c3\u00bct\u00c3\u00bcn", - "\u0120freshman", - "\u0120Machine", - "\u00cf\u0125\u00cf\u0126", - "\u0120Dia", - "\u00ec\u013f\u00b4\u00eb\u012d\u00a4", - "\u00e3\u0123\u0135\u00e3\u0123\u0128", - "nea", - "\u0120listing", - "\u0120configure", - "utor", - "Up", - "tschaft", - "ri\u00c3\u00a8re", - "\u0120upwards", - "\u0120\u00d1\u0127\u00d0\u00be\u00d1\u0129\u00d1\u0125", - "\u0120sweep", - "Br", - "\u0120expressing", - "\u0120unhappy", - "\u0120mandatory", - "gender", - "\u0120A\u00c3\u0143", - "\u0120indicators", - "\u0120oils", - "note", - "\u0120segur", - "\u00d0\u00be\u00d0\u00b6\u00d0\u00b5\u00d1\u0124", - "ynasty", - "\u0120distances", - "\u0120merge", - "BERT", - "\u0120surrender", - "\u0120buat", - "\u0120Awards", - "\u0120se\u00c3\u00b1or", - "odox", - "\u0120flavour", - "\u0120abdom", - "\u0120configur", - "86", - "\u0120DIY", - "\u0120rigid", - "\u00b0\u013a", - "\u0120corporation", - "\u0120groom", - "jaw", - "\u0120Near", - "\u00d0\u00b8\u00d0\u00bb\u00d0\u00be", - "\u0120opera", - "\u0120Innov", - "\u00d0\u00b8\u00d1\u0122\u00d0\u00b0", - "\u0135\u00b1", - "\u0120specified", - "\u0120cosm", - "\u0120Freedom", - "\u0120clown", - "\u0120Nem", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00bb", - "\u00d1\u0133\u00d0\u00bd", - "\u0120charger", - "\u00e0\u00b9\u0123\u00e0\u00b8\u00a5", - "\u0120influential", - "\u00c3\u00a4sident", - "\u00e9\u00a4", - "\u0120\u00ec\u0126\u0142\u00eb", - "\u0120volumes", - "\u00e6\u0132", - "\u0120outras", - "\u0120Twitch", - "\u0120founding", - "\u0120awhile", - "\u0120coil", - "\u00ea\u00b0\u013b", - "\u0120c\u00e1\u00ba\u00a3", - "\u0120Throw", - "\u0120Hence", - "ommt", - "\u0120Benjamin", - "\u00d0\u00b3\u00d0\u00bb\u00d1\u0131\u00d0\u00b4", - "Time", - "obic", - "\u0120mour", - "\u0120dread", - "\u0120L\u00c3\u0142", - "\u0120Chile", - "\u0120preval", - "\u0120vain", - "\u0120art\u00c4\u00b1k", - "\u0120preserved", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00b4", - "\u0120warehouse", - "\u0120beste", - "\u0120Several", - "\u0120Situation", - "\u0120cardboard", - "Tod", - "erna", - "\u0120garant", - "\u0120gesture", - "\u0120hen", - "\u0120spelling", - "osexual", - 
"\u0120anne", - "\u0120mice", - "\u0120Meine", - "card", - "\u0120rebell", - "\u0120certo", - "\u0120\u00ec\u013e\u0142\u00eb", - "\u0120verschied", - "\u0120Bos", - "\u0120invention", - "\u0120trze", - "\u0120mani\u00c3\u00a8re", - "\u0120Chad", - "\u0120spre", - "\u0120organisations", - "\u0120poorly", - "\u0120anterior", - "\u0120stair", - "\u00d0\u00ba\u00d1\u0122", - "\u0120atomic", - "\u0120sympath", - "\u0120continually", - "\u0120kleine", - "\u00c3\u00a8te", - "\u00d0\u00b8\u00d1\u012b", - "\u00ce\u00bf\u00cf\u0124", - "peut", - "\u0120reposit", - "\u0120entra", - "Em", - "\u0120financing", - "\u0120\u00d0\u00bc\u00d0\u00bd\u00d0\u00be\u00d0\u00b3", - "\u0120thesis", - "\u0120Computer", - "eau", - "\u0120Tree", - "\u0120bride", - "onsieur", - "shire", - "wic", - "DE", - "\u0120\u00ec\u012a\u013a\u00eb", - "\u0120acom", - "\u0120PO", - "ersch", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bc\u00d0\u00be\u00d1\u012b", - "\u0120Armen", - "\u0120\u00ec\u00a3\u00bd", - "\u0120zor", - "\u0120prints", - "\u0120Dass", - "\u00e6\u00b8\u00af", - "\u0120durable", - "\u0120Transport", - "\u00ec\u0140\u0132\u00ea\u00b0\u0122", - "\u0120\u00d0\u00bb\u00d0\u00b5\u00d0\u00b3", - "\u0120d\u00c3\u00a9t", - "\u00c3\u00b4le", - "amous", - "YN", - "\u0120cliff", - "\u0120grammar", - "\u0120\u00d0\u0141\u00d0\u00be\u00d1\u012f\u00d1\u0124\u00d0\u00be\u00d0\u00bc\u00d1\u0125", - "\u0120l\u00c3\u0142m", - "esch", - "\u0120miserable", - "\u0120volts", - "\u0120Cad", - "ukan", - "\u00d1\u0124\u00d0\u00b8\u00d0\u00b2", - "rust", - "\u0120\u00ec\u013a\u00ac\u00eb\u013f\u00bc", - "\u0120verk", - "\u0120chickens", - "\u0120Yoo", - "\u0120outfits", - "code", - "\u0120hierarchy", - "netes", - "\u0120counterpart", - "\u0120t\u00c3\u00b4i", - "\u0120ted", - "\u0120Bart", - "\u0120\u00eb\u013f\u00bc", - "\u0120Genau", - "\u0120incoming", - "\u0120ABC", - "rique", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00bf", - "qual", - "\u0120incentive", - "\u0120ihren", - "\u00d7\u0142\u00d7\u013b", - "loe", - "\u01201930", - "\u0120barg", - "\u0120diction", - "\u0120\u00c3\u00b6nce", - "INS", - "\u0120reh", - "isiaj", - "mouth", - "\u0120scoring", - "l\u00c4\u00b1k", - "\u0120\u00ec\u0137\u0126\u00ec\u00a3\u00bc", - "ORIA", - "\u0120Estados", - "\u0120companion", - "\u0120assemble", - "\u0120punished", - "\u0120ital", - "\u0120prevents", - "istes", - "\u0120Kentucky", - "\u0120locate", - "\u0120fasting", - "\u00e3\u0123\u00a8\u00e6\u0122\u013f", - "\u0125\u0122", - "\u0120Seb", - "\u0120Crown", - "opia", - "\u0120whip", - "usz", - "\u00d0\u00ba\u00d0\u00b0\u00d0\u00bc\u00d0\u00b8", - "\u0120databases", - "\u00e5\u0143\u0139", - "\u0120prosec", - "\u01201997", - "\u0120\u00ec\u0124\u00b4\u00ec\u00a7\u013f", - "\u0120Solar", - "\u0120Pues", - "\u0120Zen", - "ollo", - "\u0120Guru", - "\u0120squeez", - "\u0120\u00d0\u0139\u00d0\u00b0", - "\u0120\u00c4\u012f", - "ceptions", - "cca", - "izable", - "mand", - "\u0120breakthrough", - "\u0120tablespoon", - "\u0120SEC", - "ikh", - "\u0120S\u00c3\u00a3o", - "\u0120\u00d0\u00bf\u00d0\u00bb\u00d0\u00be", - "amen", - "\u0120prac", - "\u0120darling", - "\u0120taller", - "\u0120rendering", - "\u0120\u00ec\u013c\u00b0\u00eb\u00a6\u00ac\u00ea\u00b0\u0122", - "\u0120\u00cf\u0126\u00ce\u00b7\u00cf\u0124", - "\u0120m\u00c3\u00a3", - "\u0120esos", - "uerdo", - "\u0120\u00d1\u0123\u00d1\u0129\u00d0\u00b8\u00d1\u0124", - "aller", - "\u00ec\u0139\u012a\u00ec\u0138\u00b4\u00ec\u013c\u0136", - "\u0120millones", - "lerin", - "\u0120pegar", - "onne", - "\u0120enrollment", - "\u0120liegt", - "\u0120boa", - 
"wi\u00c4\u013b", - "bsp", - "\u0120cycling", - "\u0120Bernie", - "\u01201989", - "\u0120\u00d0\u00b4\u00d0\u00b0\u00d0\u00bb\u00d1\u012e", - "\u0120Dakota", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d1\u0131\u00d0\u00b7", - "\u0120CP", - "\u0120stare", - "\u00ed\u0124\u00a4", - "\u0120prosperity", - "\u0120arrangements", - "\u0120arriving", - "m\u00c3\u00a4", - "\u0120kayak", - "ipt", - "\u0120pardon", - "\u0120relat", - "\u0120verste", - "\u0120Fig", - "\u0120foil", - "\u0120Talking", - "peare", - "\u0120noi", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d1\u012a", - "\u0120hockey", - "\u0120ado", - "\u0120OUT", - "67", - "\u0120hormones", - "\u0120Avenue", - "\u0120Superman", - "\u0120prescription", - "ubernetes", - "CL", - "otive", - "NIS", - "ienen", - "\u0120sadness", - "\u0120Vit", - "Ty", - "\u0120starter", - "\u0120bede", - "\u0120foundations", - "\u0120sore", - "\u00e5\u00ba\u0139", - "\u00d1\u012b\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d0\u00b2", - "\u00ec\u013c\u00b0\u00eb", - "\u0120\u00d1\u0129\u00d1\u0125\u00d0\u00b2", - "link", - "\u0120maneu", - "working", - "\u00c3\u0142n", - "\u0120Attack", - "\u0120Cart", - "veis", - "\u0120Resp", - "ensing", - "\u0120\u00ec\u00a2\u012d\u00ec\u0137\u0126\u00ec\u013c\u0136", - "\u0120escuch", - "\u0120RNA", - "\u0124\u00b4", - "\u0120adop", - "\u0120bending", - "\u00d8\u00b9\u00d8\u00af", - "\u0120manages", - "usp", - "\u0120tart", - "\u0120router", - "Bo", - "\u0120establishing", - "\u0120balancing", - "\u0120athletic", - "\u0120Slo", - "\u0120fills", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b1", - "\u0120\u00d0\u00b4\u00d0\u00b0\u00d0\u00bb", - "\u0120posso", - "\u0120Vielen", - "\u0120critics", - "\u0120lawsuit", - "\u0120Isaac", - "\u0120\u00d1\u0126\u00d0\u00b8\u00d0\u00bb\u00d1\u012e\u00d0\u00bc", - "\u0120tras", - "\u0120praw", - "\u0120Crazy", - "\u0120neu", - "\u0120kull", - "\u0120tumor", - "\u0120APP", - "gate", - "\u0120ARE", - "98", - "\u0120Steam", - "\u0120fucked", - "lage", - "\u0120\u00e2\u013b\u00ac", - "\u0120MD", - "fy", - "\u0120shells", - "\u0120Seems", - "izers", - "\u0120ranges", - "\u0120Antonio", - "ATION", - "\u0120Baba", - "\u0120\u00ec\u0125\u012b", - "kun", - "\u0120prayed", - "\u00d1\u0122\u00d1\u0131", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u0124\u00d0\u00b8\u00d0\u00b2", - "\u0120seas", - "bury", - "\u0120\u00d7\u0136\u00d7\u00a9", - "\u0120trait", - "\u0120Depending", - "\u0120dre", - "\u0120k\u00c3\u00b6nnt", - "\u00d1\u0128\u00d1\u0125", - "\u0120lipstick", - "eez", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bc\u00d0\u00b5\u00d1\u0122", - "\u0120assignments", - "Bob", - "\u0120metals", - "\u0120specially", - "\u00e5\u00b0\u012f\u00e4\u00b8\u012f\u00e5\u00b0\u012f", - "\u0120\u00ec\u013a\u012a\u00eb", - "\u0120\u00c5\u00a1", - "\u0120vista", - "\u0120\u00ce\u00ac", - "\u0120twins", - "\u0120notable", - "\u0120Sau", - "\u0120d\u00c3\u00a9velop", - "\u0120\u00c3\u00a7ek", - "\u0120polynom", - "avam", - "\u0120tamb\u00c3\u00a9", - "\u00d0\u00be\u00d0\u00bd\u00d0\u00be\u00d0\u00bc", - "\u0120plasma", - "\u0120efect", - "\u0120l\u00c3\u00a4ng", - "\u0120casi", - "\u00d1\u0123\u00d0\u00b0", - "\u00c4\u00b1m\u00c4\u00b1", - "\u00e3\u0123\u013b\u00e3\u0124\u012d", - "\u0135\u00a4\u00ec\u013f\u0122", - "\u0120labour", - "ossen", - "\u0120Pun", - "rif", - "\u0120doses", - "\u0120operates", - "\u00d0\u00b8\u00d0\u00bb\u00d0\u00bb\u00d0\u00b8", - "\u0120jaar", - "staw", - "\u0120\u00ec\u0124\u00ac\u00eb\u0140\u0133", - "\u0120atm", - "\u0120protects", - "\u0120imped", - "HO", - 
"\u0120cima", - "\u0120toch", - "abis", - "\u0120sendo", - "laus", - "\u0120curl", - "\u0120Num", - "\u0120sponsors", - "\u0120d\u00c3\u00a9but", - "\u0120Alexa", - "\u0120B\u00c3\u00bcr", - "\u0120Amer", - "\u0120cope", - "\u0120\u00d0\u00b8\u00d0\u00b7\u00d0\u00b2", - "jal", - "\u01201995", - "apat", - "resse", - "\u0120Prize", - "\u0120Claire", - "\u0120Brandon", - "\u0120wszystko", - "\u0120valued", - "\u00e0\u00b8\u013b\u00e0\u00b8\u00b0", - "\u0120sect", - "\u0120secretly", - "\u0120diamonds", - "\u0120Evan", - "\u0120RPG", - "\u00e3\u0123\u00ab\u00e3\u0123\u00aa", - "\u012a\u00eb\u0131\u0126", - "\u0120Universal", - "\u0120doubts", - "\u0120Pin", - "wi\u00c4\u0127z", - "\u013c\u00a9", - "\u0120albo", - "\u0120braucht", - "AUL", - "\u0120Mobile", - "grades", - "\u0120schem", - "why", - "\u0120Nicht", - "pi", - "gle", - "\u0120chorus", - "\u0120gly", - "\u0120reinforce", - "\u0120muff", - "\u0120Shen", - "\u0120Hola", - "\u00d1\u0125\u00d0\u00b3", - "videmment", - "vial", - "acious", - "laimed", - "\u0120Rico", - "\u0120vegg", - "\u0120illustration", - "\u0120Butter", - "owad", - "\u0120eux", - "\u0120enfants", - "\u0120Leader", - "\u0120Village", - "etically", - "\u00d9\u0128\u00d9\u012c", - "\u0120stew", - "\u0120surprises", - "\u0120cue", - "\u0120Grandma", - "\u0120Celsius", - "\u0120Richt", - "enc", - "\u0120petition", - "\u0120herb", - "\u0120wicked", - "\u0120schle", - "ocaly", - "\u0120transf", - "\u0120tokens", - "\u0120Gray", - "\u0120BBC", - "IK", - "\u01201500", - "zn", - "\u0120Nev", - "\u0120koy", - "\u0120zar", - "\u0120bullshit", - "\u0120Colombia", - "ulative", - "\u0120widespread", - "yect", - "kit", - "\u0120empresa", - "\u0120nour", - "\u0120burns", - "atin", - "aired", - "\u0120revolutionary", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d0\u00b4\u00d1\u0125", - "\u0120Logan", - "\u01201996", - "\u0120Graham", - "reb", - "\u0120NHS", - "\u00e6\u013e\u013d", - "\u0120costumes", - "\u0120nawet", - "\u0120lovers", - "\u0120Lucy", - "\u0120Indigenous", - "\u00ed\u0137\u013a\u00ea\u00b8\u00b0", - "\u0120immunity", - "\u00a5\u00b4\u00eb", - "uito", - "\u0120excessive", - "\u0120donations", - "\u0120\u00d7\u0136\u00d7\u00a8", - "\u0120\u00ec\u00b2\u00ab", - "\u00e9\u012b\u0126", - "\u0120drying", - "melon", - "\u0120surveys", - "\u0120\u00eb\u00ac\u00b4\u00ec\u012c\u00a8", - "\u00e9\u00a2\u00a8", - "aaa", - "\u0120probe", - "ancial", - "\u0120louder", - "\u0120hotels", - "\u00c3\u00bc\u00c4\u0141", - "agner", - "\u0120origins", - "\u0120\u00eb\u00a7\u012a\u00ec\u00a7\u0122\u00eb\u00a7\u012b", - "\u0120**", - "\u0120strangers", - "\u0120Haus", - "comed", - "\u0120anthrop", - "\u0120uso", - "\u0120\u00ec\u0137\u0126\u00ec\u00a7\u0123", - "\u0120Yuan", - "\u0120\u00ed\u0137\u0126\u00ec\u013c\u0136", - "pler", - "ressive", - "\u0120spraw", - "\u0120Stew", - "\u01201994", - "\u0120elders", - "\u0120meinen", - "\u0120junt", - "\u0120acoust", - "\u0120Wohn", - "\u0120bananas", - "\u0120projection", - "\u0120Stick", - "legt", - "speed", - "\u0120c\u00c5\u00a9ng", - "\u0120Wort", - "\u0120Baltimore", - "\u0120\u00d1\u0128\u00d0\u00b5\u00d0\u00bb", - "\u0120dunno", - "\u00e5\u00bc\u00b7", - "?,", - "\u00e3\u0125\u012b\u00e3\u0125\u00b3", - "\u0120Local", - "osto", - "\u00d0\u0143", - "\u00d0\u00be\u00d0\u00b4\u00d0\u00b0", - "\u0120Portuguese", - "\u0120theirs", - "\u0120d\u00c3\u00a9m", - "\u00e5\u0131\u00a6", - "\u0120drauf", - "\u0120Buddhist", - "erta", - "Ge", - "\u0120carrot", - "\u0120Wonderful", - "\u0120soak", - "\u0120chairman", - "ggi", - "ICA", - "fried", - "\u0120flick", - 
"\u0120Throughout", - "\u0120\u00ec\u013c\u00b0\u00eb", - "\u0120cough", - "\u0120fluffy", - "school", - "\u0120ripped", - "--------", - "\u0120Zukunft", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00b1", - "\u0120sto", - "\u0120BO", - "pent", - "\u0120Lawrence", - "\u00cf\u012b\u00cf\u0124", - "sticks", - "\u0120Eins", - "\u0120\u00d1\u0122\u00d1\u012d", - "\u0120Strong", - "\u0120caramel", - "\u0120spite", - "azar", - "\u00e9\u0125\u00bd\u00e6\u013a\u00af", - "\u0120critically", - "\u0120obra", - "owitz", - "\u0120Zone", - "\u0120\u00d1\u0122\u00d0\u00b5\u00d0\u00ba", - "\u0120sug", - "arded", - "\u0120g\u00c3\u00ac", - "ffentlich", - "anche", - "\u00d8\u0141", - "astically", - "\u00ec\u013f\u00bc\u00eb", - "\u00d0\u00bb\u00d0\u00b0\u00d0\u00b2", - "\u0120simplest", - "\u0120Friend", - "\u0120quello", - "\u0120ambition", - "\u0120abbiamo", - "\u00e5\u00ba\u0137", - "\u0120\u00d1\u0126\u00d0\u00be\u00d1\u0122\u00d0\u00bc", - "\u0120Essa", - "\u0120educators", - "\u0120statistical", - "\u00e9\u0122\u013b\u00e9\u0124\u012c", - "\u0120changer", - "\u0120atau", - "\u00c3\u00a9tais", - "\u0120Shakespeare", - "\u00eb\u0132\u013a", - "\u0120triggers", - "\u0120realiz", - "\u0120celui", - "wheel", - "\u0120loyalty", - "\u0120screams", - "kehr", - "\u0120Mega", - "east", - "\u0120tops", - "\u0120Totally", - "ountain", - "lord", - "\u0120violation", - "\u0120GA", - "\u0120nicer", - "\u0120Fresh", - "\u0120Melissa", - "function", - "\u0120rape", - "\u0120exceptions", - "\u0120silicon", - "\u0120liberty", - "\u0120households", - "\u00e3\u0123\u012f\u00e3\u0123\u00be\u00e3\u0123\u013b", - "\u0120CA", - "\u0120\u00d0\u0140\u00d0\u00b1", - "\u0120lib", - "\u0140\u012e", - "cific", - "\u0120tropical", - "\u0120investigating", - "HD", - "\u0120adapter", - "\u0120Pitt", - "ancia", - "\u0120Shell", - "friendly", - "\u0120conclusions", - "\u0120turtle", - "\u0120decomp", - "\u0120animations", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d0\u00ba", - "insi", - "\u0120retention", - "kie", - "\u0120injection", - "\u0120Madison", - "\u00ec\u00b0\u00b0", - "\u0120vient", - "\u0120varied", - "\u0120violin", - "\u0120Bil", - "\u0120luckily", - "\u0120htt", - "l\u00c3\u00a4", - "\u0120ranch", - "\u00e7\u013e\u012d\u00e7\u013e\u012d", - "\u0120s\u00c3\u00b3lo", - "\u00ec\u0137\u0127", - "\u0120Derek", - "\u0120Scripture", - "\u00d0\u00be\u00d1\u0122\u00d0\u00b0", - "\u0120classrooms", - "avil", - "formed", - "\u0120beforehand", - "\u0120Gem", - "prech", - "\u0120lin", - "\u0120greens", - "\u00d1\u0128\u00d0\u00b5\u00d0\u00b2", - "\u0120Mercedes", - "\u0120drought", - "gasps", - "\u0120abortion", - "\u0120terribly", - "\u0120spos\u00c3\u00b3b", - "\u0120secured", - "\u0120atr\u00c3\u00a1s", - "\u0120wavelength", - "\u0120grains", - "ective", - "\u0120spacecraft", - "\u0120tours", - "\u0120profes", - "\u0120surgeon", - "\u0120Pie", - "\u0120ideally", - "arner", - "UP", - "opard", - "sce", - "\u0120immense", - "\u0120Ort", - "roller", - "\u0120Dallas", - "\u0120Nicholas", - "\u0120sulf", - "\u0120Toyota", - "\u0120quantities", - "ceans", - "\u0120cui", - "an\u00c3\u00a7a", - "\u0120CAN", - "itzerland", - "\u00e5\u0126\u00bf", - "\u0120zou", - "\u0120Cyber", - "legen", - "\u0120Init", - "edu", - "\u0120apert", - "\u0120adjac", - "ouv", - "\u00e8\u0122\u012e\u00e4\u00b8\u0136", - "rs", - "\u0120cabbage", - "\u0120wheelchair", - "inyl", - "\u0120Dynam", - "\u0120\u00ec\u0137\u0126\u00eb\u012d\u012a\u00eb\u013f\u00bc", - "\u0120ling", - "hl", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b3\u00d1\u0125", - "\u0120crisp", - "\u0120mij", - 
"\u0120dug", - "nin", - "\u0120bloss", - "\u0120belonging", - "\u0120loudly", - "\u0120minerals", - "\u0120concluded", - "\u0120searched", - "96", - "\u0120Meet", - "\u0120SEO", - "\u0120\u00d0\u00a1\u00d0\u00ba", - "\u0120Hob", - "otta", - "\u0120propaganda", - "\u0120cinnamon", - "\u0120hunter", - "\u0120gemeins", - "\u0120sculpture", - "ulsion", - "\u0120v\u00c3\u00a4l", - "\u0120magazines", - "\u0120controversy", - "\u00e4\u00b8\u0122\u00e6\u00a8\u00a3", - "\u0120sequences", - "\u00e3\u0123\u0126\u00e3\u0124\u012d", - "\u0120\u00ed\u013c\u012e", - "\u0120deleted", - "\u00e4\u00bd\u00bf", - "\u0132\u00eb\u0131\u0126", - "\u0120varying", - "\u00e3\u0125\u0128", - "\u0120mounting", - "\u0120affair", - "\u0120pathways", - "\u00e6\u00a6", - "\u0120digo", - "\u00e4\u00ba\u00ae", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00ba", - "Alex", - "\u0120tobacco", - "\u0120CV", - "\u0120bothered", - "\u0120ambient", - "inky", - "\u0120SL", - "\u0120hates", - "\u0120je\u00c5\u00bceli", - "\u0120congreg", - "\u0120elas", - "\u0120deuts", - "\u0120Studios", - "ch\u00c4\u013b", - "\u0120documented", - "\u0120Cruz", - "\u0120Len", - "\u0120Douglas", - "\u0120Portugal", - "enti", - "\u0120spouse", - "\u0120analys", - "avia", - "\u0120edited", - "\u0120l\u00e1\u00ba\u00a1i", - "built", - "\u0120ville", - "adora", - "\u0120bracelet", - "\u0120sushi", - "\u0120pm", - "\u0120trails", - "\u0120lug", - "\u0120\u00c3\u00b6ver", - "\u0120sorrow", - "\u0120colony", - "adox", - "\u0120serie", - "anyak", - "\u0120\u00d8\u00b7", - "\u0120Gulf", - "\u00e6\u013a\u00af\u00e4\u00b8\u012f\u00e6\u013a\u00af", - "\u0120PV", - "\u0120Samuel", - "\u0120Kit", - "\u0120Ral", - "ontin", - "expl", - "\u0120entries", - "\u0120activists", - "Ps", - "\u0120sant", - "\u0120\u00d1\u0124\u00d0\u00be\u00d1\u0129", - "\u0120Bruno", - "keley", - "\u0120tutto", - "\u00e9\u0136", - "\u0120vintage", - "\u0120terrified", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0127", - "usive", - "owers", - "\u00d0\u00b0\u00d0\u00b9\u00d1\u0124", - "\u00eb\u0131\u013b", - "\u0120twisted", - "\u0120Thought", - "\u0120tah", - "\u0120shrink", - "\u0120sheer", - "lit", - "\u0120dalam", - "\u0120dib", - "\u0120vard", - "owane", - "\u0120dobr", - "\u0120Rena", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d0\u00be\u00d1\u0130", - "\u0120pa\u00c3\u0143ses", - "\u0120Era", - "\u00e3\u0123\u00ae\u00e3\u0123\u00a7", - "\u0120BUT", - "sighs", - "\u0120\u00ea\u00b7\u00b8\u00ea\u00b1\u00b0", - "\u0120gro\u00c3\u0141en", - "\u0120\u00eb\u00b9\u00a8\u00eb\u00a6\u00ac", - "\u0120nerves", - "\u0120constit", - "\u0120preocup", - "\u0120Gay", - "\u0120Xu", - "keeper", - "heure", - "..)", - "\u0120Calm", - "\u0120Unidos", - "\u0120\u00ec\u013f\u00b4\u00ea\u00b2\u0125", - "\u0120Aqui", - "\u0120\u00ec\u0142\u013e\u00ec\u013f\u00bc", - "d\u00c4\u00b1r", - "\u00ec\u00a6\u013a", - "your", - "\u0120\u00d1\u012f\u00d1\u0124\u00d0\u00b8\u00d0\u00bc", - "2020", - "\u0120rund", - "\u0120HO", - "\u0120Catherine", - "ieli", - "\u0120fusion", - "\u0120ideology", - "\u0120foram", - "shaped", - "\u0120\u00ed\u013d\u0126\u00eb", - "\u0120wt", - "\u0120retr", - "\u0120pr\u00c3\u00a9c", - "\u0120\u00ea\u00b0\u0133", - "\u0120openly", - "vity", - "\u00ea\u00b5\u00ac\u00ec\u013c\u0136", - "\u0120obstacle", - "\u0120boo", - "\u0120seiner", - "icorn", - "\u0120eigenlijk", - "\u0120header", - "aremos", - "\u0120softer", - "\u0120\u00d0\u0141\u00d0\u00be\u00d0\u00b4", - "\u0120prejud", - "\u0120defines", - "ierte", - "\u0120blending", - "\u0120believers", - "\u0120Wochen", - 
"\u0120\u00d0\u00bd\u00d0\u00b8\u00d0\u00ba\u00d0\u00b0\u00d0\u00ba", - "\u0120\u00d0\u013c\u00d0\u00be\u00d0\u00b3\u00d0\u00b4\u00d0\u00b0", - "\u0120Typically", - "\u0120\u00ed\u0123\u00ac", - "\u00e7\u00ae\u00a1", - "cios", - "\u0120missiles", - "\u0120sponge", - "\u0120Kitchen", - "\u0120tren", - "ningen", - "\u0120scrap", - "\u0120serait", - "\u00b4\u00ec\u0142", - "\u00e7\u00b9", - "\u0120\u00eb\u00b0\u013a\u00eb", - "\u0120restored", - "\u0120przyk\u00c5\u0124ad", - "\u0120Kubernetes", - "\u0120sait", - "\u0120uw", - "\u0120enabling", - "\u0120travers", - "amps", - "\u00e5\u0131\u0139", - "\u0120OMG", - "ensor", - "\u0120zosta", - "\u0120pronounced", - "Ang", - "normal", - "\u0120economies", - "tin", - "\u0120Champion", - "izen", - "\u0120arbeiten", - "\u0120Gospel", - "\u0120Zu", - "nga", - "\u0120literacy", - "\u0120Mans", - "\u0120circulation", - "\u0120adap", - "\u0120Total", - "\u0120mereka", - "\u0120olacak", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d1\u0124\u00d0\u00b8", - "Jack", - "\u0120mund", - "\u0120thief", - "bies", - "\u0120\u00ea\u00b2\u0123", - "aque", - "\u0120\u00da\u00a9\u00db\u012e", - "\u0120Scar", - "\u00e5\u00b2", - "\u0120abol", - "\u0120devote", - "\u012001", - "\u0120sitten", - "\u0120Visual", - "week", - "some", - "ingt", - "\u0120journalism", - "\u0120Hir", - "\u0120Bachelor", - "inery", - "\u00c3\u013eND", - "\u00e3\u0125\u0141", - "\u00e7\u00bb\u013b", - "\u0120coloring", - "\u0120Crist", - "\u0120celebrities", - "\u0120\u00d1\u0129\u00d0\u00b8\u00d1\u0123", - "\u0120Crit", - "\u0120differentiate", - "\u0120\u00d0\u013e\u00d0\u00bd\u00d0\u00b5", - "elim", - "\u0120seafood", - "\u0120algumas", - "otherapy", - "\u00e6\u012a\u00b0", - "\u0120glaub", - "\u0120arbitrary", - "gens", - "\u0120\u00d0\u00b1\u00d1\u0125\u00d0\u00b4\u00d0\u00b5\u00d0\u00bc", - "\u0120tav", - "\u0120creamy", - "\u0120Country", - "a\u00c3\u00b1", - "\u00d0\u00bc\u00d0\u00b5\u00d1\u0124", - "\u0120hinter", - "\u0120mism", - "\u0120illustrate", - "\u00c3\u013eNDNIS", - "\u0120decreasing", - "\u0120weniger", - "AKI", - "ixon", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00b9", - "\u0120fatto", - "\u0120nerd", - "\u00e7\u0142", - "\u0120bitte", - "Per", - "\u0120tane", - "\u0120g\u00c3\u00b6z", - "\u0120forte", - "\u0120Ey", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b2\u00d0\u00b5\u00d1\u0122", - "\u00e8\u00a2\u00ab", - "\u0120WordPress", - "\u0120Mis", - "\u00c5\u00af", - "z\u00c3\u00a4h", - "\u0120int\u00c3\u00a9ress", - "osaurs", - "\u0120Falls", - "\u0120nessa", - "97", - "\u0120museums", - "\u0120corresponds", - "\u0120sings", - "four", - "\u0120eder", - "\u0120Communist", - "oa", - "nek", - "\u0120WHO", - "\u0120corpo", - "\u0120messing", - "\u00cf\u0126\u00ce\u00b1\u00ce\u00b9", - "\u0120brushes", - "\u0120bisc", - "\u0120Arbeits", - "\u0120Tax", - "\u0120sele", - "\u0120flags", - "oupe", - "\u0120anticipated", - "\u00e3\u0125\u0133", - "\u0120Nad", - "\u0120poured", - "\u0120ml", - "\u0120llama", - "\u0120visualize", - "\u0120listeners", - "\u00d9\u0126\u00d9\u0125", - "alten", - "Michael", - "\u0120cos\u00c3\u00ac", - "\u00d5\u00a1\u00d5", - "opus", - "\u0120\u00ed\u0137\u00b4\u00ec\u00a3\u00bc", - "\u0120hike", - "\u0120Attorney", - "\u0120Hillary", - "uded", - "\u0120\u00ed\u0137\u013a\u00ec\u00a7\u0122\u00eb\u00a7\u012e", - "\u0120dove", - "\u0120storms", - "\u00d0\u00b0\u00d0\u00ba\u00d1\u0123", - "\u0120doctrine", - "\u0120hex", - "iks", - "no\u00c5\u013d\u00c4\u0129", - "\u0120scripts", - "\u0120\u00ce\u00b4\u00ce\u00b5\u00ce\u00bd", - 
"\u0120\u00d1\u012f\u00d1\u0124\u00d0\u00b8\u00d1\u0127", - "\u0120\u00d0\u0128", - "aber", - "\u0120Vas", - "\u0120centimeters", - "\u00d7\u0140\u00d7\u0136", - "\u00d0\u00bd\u00d0\u00b8\u00d0\u00b1", - "\u0120riders", - "\u0120Trib", - "\u00e5\u012e\u0127", - "\u0120tak\u00c5\u00bce", - "\u0120noun", - "\u0120icons", - "\u0120solely", - "minded", - "\u0120dispon", - "\u0120Switzerland", - "\u0120clusters", - "\u0120queda", - "ailing", - "\u0120manga", - "\u012068", - "\u0126\u012a", - "\u0120tet", - "gins", - "haus", - "\u00e7\u00a9\u00ba", - "\u00e5\u00b7\u00a5", - "\u0120OP", - "oted", - "\u0120nouveau", - "ALLY", - "\u00d9\u012a\u00d8\u00af", - "\u00c3\u00b2n", - "\u0120mortality", - "\u0120GitHub", - "drop", - "\u0120disgu", - "\u0120recom", - "\u0120locals", - "\u0120homemade", - "amba", - "\u0120pronunciation", - "\u0120alphabet", - "\u00d0\u00b0\u00d0\u00bd\u00d1\u012e", - "owany", - "iras", - "idency", - "OME", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d1\u0123", - "arak", - "viamente", - "\u0120nonprofit", - "\u0120YouTuber", - "\u0120parenth", - "\u0120Boo", - "vat", - "\u0120Stir", - "\u0120precip", - "\u0120ants", - "\u0120ally", - "\u0120Maori", - "\u0120\u00eb\u012e\u0122\u00ed\u0137\u013e", - "\u00e5\u0131\u00af\u00e6\u013a\u00af", - "ogene", - "\u0120Labour", - "arette", - "\u0120recycling", - "ensa", - "\u0120pursuit", - "\u0120sak", - "\u0120\u00d0\u0139\u00d0\u00b4\u00d0\u00b5\u00d1\u0123\u00d1\u012e", - "\u0120tolerance", - "\u0120saat", - "\u0120clicked", - "\u00e2\u013b\u00a5", - "\u0120facebook", - "\u0120Into", - "\u0120incentives", - "\u00ea\u00b8\u00b0\u00eb\u012c\u0136", - "\u0120Dennis", - "\u0120Wik", - "gesch", - "\u00e0\u00b9\u0122\u00e0\u00b8\u013d", - "\u0120\u00cf\u0122\u00ce\u00b1", - "\u0120Whoo", - "\u0120rounded", - "\u0120dope", - "\u0120capturing", - "\u0120Warri", - "\u0120civilian", - "\u0120charming", - "\u0120esas", - "\u0120sustained", - "\u0120leaning", - "\u0120abundance", - "\u00c3\u0143lia", - "\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d1\u012d\u00d0\u00b9", - "\u0120ph\u00e1\u00ba\u00a3i", - "acja", - "\u0120\u00ea\u00b0\u013b\u00ec\u0137\u0126", - "activ", - "\u00e0\u00b8\u00b2\u00e0\u00b8\u00a2", - "\u012097", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b9", - "cro", - "\u0120Jackie", - "ittees", - "bracht", - "ulent", - "\u0120\u00ec\u0142\u013e\u00eb", - "\u0120plugin", - "vantage", - "party", - "\u0120suas", - "\u0120ante", - "\u00d1\u0125\u00d0\u00bb", - "\u00d0\u013f\u00d0\u0132", - "\u00e6\u0124\u00a8", - "\u0120\u00cf\u0125\u00cf\u0127", - "\u0120meth", - "\u0120enthusiasm", - "\u00d1\u0131\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "\u00ed\u013b\u0136\u00eb", - "\u0120synthetic", - "\u0120seasoning", - "\u0120Lost", - "onomy", - "\u0120Spark", - "\u0120bure", - "\u0120assured", - "\u0120imagin", - "\u0120carro", - "Sha", - "\u00c4\u0127t", - "\u00d0\u00bd\u00d1\u0125\u00d1\u0124\u00d1\u012e", - "\u00c3\u00a1tica", - "TY", - "\u0120kern", - "\u0120Brazilian", - "\u00c3\u00b0", - "\u0120suspended", - "\u0120Carib", - "\u0120bizim", - "\u0120Oliver", - "\u00e3\u0123\u00b6", - "Tom", - "\u0120\u00d0\u00bf\u00d0\u00bb\u00d0\u00b0\u00d0\u00bd", - "\u0120nope", - "omething", - "\u0120beiden", - "\u00d1\u0128\u00d0\u00b5\u00d0\u00bd", - "\u0120fluct", - "\u0120\u00ce\u00bc\u00ce\u00bf\u00cf\u0127", - "\u0120fathers", - "\u0120Blake", - "\u0120upward", - "\u0120Dash", - "\u0120Lil", - "\u0120\u00ec\u012a\u013a\u00eb\u0131\u0126", - "\u0120revelation", - "\u0120elevated", - "\u0120Jiang", - "LED", - "\u0120Thompson", - 
"\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b3\u00d1\u0125\u00d1\u0124", - "\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d1\u0125", - "ifiers", - "\u0120comeback", - "\u0120buyers", - "\u00ea\u00b2\u00b0", - "\u0120Sales", - "\u00d0\u00b8\u00d1\u0129\u00d0\u00b5", - "ciones", - "\u0120whistle", - "\u0120dull", - "LEX", - "\u0120\u00ed\u0137\u013a\u00ea\u00b2\u0142\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120criminals", - "\u0120descent", - "ipple", - "mas\u00c4\u00b1", - "\u0120foolish", - "\u0120\u00d0\u00b4\u00d1\u0125\u00d0\u00bc\u00d0\u00b0\u00d1\u0130", - "tar", - "\u0120mango", - "\u0120choreography", - "Matt", - "\u0120territor", - "\u0120acaba", - "\u0120Einstein", - "\u0120IBM", - "\u0120Metal", - "\u0120Crystal", - "\u0120rah", - "\u0120foul", - "\u0120Islands", - "\u0120intact", - "\u0120Rail", - ".:", - "\u0120ac\u00c3\u00a1", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00bf", - "\u00d0\u00b5\u00d1\u0122\u00d0\u00b5", - "\u0120Write", - "hehe", - "\u0120FO", - "\u0120\u00cf\u0125\u00cf\u0126\u00ce\u00b7", - "\u0120doin", - "held", - "\u0120appropriately", - "\u0120deliberately", - "\u0120archive", - "\u0120giveaway", - "\u00e3\u0123\u0135\u00e3\u0123\u0135", - "\u0120finale", - "\u00d0\u00bb\u00d0\u00b0\u00d1\u0123", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00be", - "\u00c6\u00a1n", - "\u00e6\u00a3\u0134", - "ogo", - "\u00e7\u012b\u00a9", - "\u0120Audience", - "\u00e3\u0127\u0142", - "\u0120subur", - "\u0120headache", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00bd\u00d1\u0131", - "\u0120Witch", - "\u0120Swedish", - "\u0120BI", - "\u0120erase", - "\u0120khi", - "\u0120commentary", - "\u0120Sultan", - "\u00ed\u0125\u013f", - "\u0120Leban", - "\u0120\u00eb\u00b3\u00b4\u00ec\u012d", - "\u0120Pam", - "pekt", - "month", - "\u0120grounded", - "\u00ea\u00be", - "\u0120\u00c5\u0141ekilde", - "250", - "\u0120SCH", - "ioso", - "\u0120inaug", - "heimer", - "\u0120reflecting", - "\u0120Ruth", - "\u0120Oil", - "\u0120trouver", - "uep", - "..]", - "\u0120\u00ec\u0140\u012a\u00eb", - "\u0120olha", - "\u0120reasonably", - "\u0120glitch", - "UB", - "\u0120Gran", - "\u0120adalah", - "\u0120lent", - "\u00d8\u00b1\u00d8\u00a7", - "\u0120traction", - "\u0120adjusting", - "\u00b4\u00a4", - "\u00d0\u00bd\u00d0\u00b8\u00d0\u00b1\u00d1\u0125\u00d0\u00b4\u00d1\u012e", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00bf", - "\u0120stretched", - "\u0120ort", - "\u0120cosine", - "viol", - "\u0120\u00ec\u0127", - "cir", - "\u0120bastard", - "\u00e4\u00b8\u0129", - "\u0120\u00d1\u0127\u00d0\u00be\u00d0\u00b4", - "\u0120quier", - "\u0120pressures", - "\u0120Anh", - "\u00e5\u00b9\u00be", - "\u0120elles", - "\u0120\u00d0\u00b4\u00d1\u0122\u00d1\u0125\u00d0\u00b7", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b6\u00d0\u00b5\u00d1\u0124\u00d0\u00b5", - "\u0120ch\u00e1\u00bb", - "\u0120M\u00c3\u00a9", - "\u00c3\u00b6k", - "\u00e1\u00ba\u00a7u", - "\u00ec\u0142\u012a", - "zin", - "\u0120caution", - "iban", - "\u0120judging", - "\u00d1\u0125\u00d1\u0130\u00d1\u0124", - "\u0120baj", - "\u0120\u00d0\u00a1\u00d0\u00b5\u00d0\u00b9\u00d1\u0129\u00d0\u00b0\u00d1\u0123", - "\u0120Poor", - "\u0120Nazi", - "\u0120upbeat", - "yang", - "\u0120weekends", - "\u0120Essentially", - "\u0120oluyor", - "\u0120spatial", - "acker", - "\u0120seller", - "\u0120\u00d7\u0132\u00d7\u0137\u00d7\u00aa", - "\u0133\u00d7\u013e", - "\u0120vivid", - "\u0120Bond", - "\u00ea\u00b6\u012e", - "iskt", - "\u00e3\u0124\u00b5", - "\u0120goat", - "driver", - "\u0120mug", - "ictional", - "\u0120allt", - "\u0120Initi", - "\u0120Rand", - "\u0120finishes", - 
"\u0120\u00ea\u00b0\u012a", - "\u0120vitam", - "\u0120teenagers", - "\u0120Morris", - "\u00ec\u00a4\u0126", - "\u0120Ori", - "iya", - "\u0120my\u00c3\u00b6s", - "Step", - "\u0120Kre", - "\u00e8\u00be\u00a6", - "\u0120dinosaur", - "\u0120\u00eb\u00aa\u0129", - "affe", - "\u0120\u00eb\u0132\u00a9\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120zeg", - "\u00e5\u012a\u0129", - "\u0120Manhattan", - "\u0120sujet", - "uelle", - "stoff", - "\u0120d\u00c3\u00bcr", - "\u0120submar", - "eses", - "\u0120aquele", - "\u0120nou", - "\u0120Faith", - "tz", - "\u0120\u00d1\u0124\u00d0\u00be\u00d0\u00bc\u00d1\u0125", - "aceut", - "liers", - "\u0120bandwidth", - "\u00c6\u00b0\u00e1\u00bb\u013f", - "\u0120respective", - "\u0120Ave", - "\u0120spreadshe", - "\u0120Sent", - "icamente", - "\u0120infra", - "\u0120learners", - "\u0120\u00e0\u00ae\u012b", - "aiah", - "renal", - "\u0120mustard", - "\u0120habt", - "\u00e7\u0125", - "\u0120Qu\u00c3\u00a9", - "\u0120analyzing", - "\u00e6\u00af\u0131", - "\u0120solic", - "\u0120\u00d7\u0136\u00d7\u0137\u00d7\u0132", - "\u0120causa", - "\u0120welcomed", - "\u0120Success", - "\u0120facile", - "\u0120\u00d0\u0141\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d0\u00bc\u00d1\u0125", - "schein", - "\u0120fetch", - "\u0120strat", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00be\u00d0\u00b8\u00d1\u0124", - "\u00ec\u0139\u0132\u00ec\u0126\u013e\u00eb\u012c\u0136", - "\u0120\u00d1\u0123\u00d0\u00bf\u00d0\u00be\u00d1\u0123\u00d0\u00be\u00d0\u00b1", - "mam", - "\u0120ser\u00c3\u0143a", - "naments", - "writer", - "\u0120consulting", - "\u00ed\u013a\u0122", - "\u0120Berkeley", - "eu", - "asive", - "UU", - "\u0120Analyt", - "\u0120submission", - "\u0120magnificent", - "enza", - "\u0120econ", - "\u0120profiles", - "\u0120incar", - "Ab", - "\u0120Nun", - "\u0120hic", - "screaming", - "\u0120resilient", - "\u00e5\u012a\u00a9", - "grund", - "\u0120concur", - "\u0120bereits", - "LD", - "\u0120nurt", - "\u00ec\u012b", - "\u0120feast", - "\u0120encuent", - "\u0120Michel", - "\u0120suprem", - "\"]", - "\u0120feeds", - "\u0120Kollegen", - "isser", - "\u0120Feng", - "\u0120Wen", - "mun", - "\u0120ten\u00c3\u0143a", - "\u0120Wrest", - "\u0120\u00ec\u013a\u00a4\u00eb\u012c\u013a\u00ec\u013f\u0122", - "\u0120stead", - "\u0120restoration", - "\u0120donated", - "\u0120dels", - "\u0120census", - "\u0120desperately", - "worthy", - "HE", - "\u0120Spa", - "\u0120Bryan", - "\u0120hj", - "\u0120Raw", - "\u00ec\u0137\u0126\u00eb", - "\u0120Camera", - "\u0120zien", - "\u0120styl", - "\u0120TW", - "\u0120Cheese", - "borne", - "\u0120obl", - "\u0120Already", - "\u0120unstable", - "\u0120flames", - "post", - "Ha", - "romagn", - "\u0120\u00ec\u0139\u0126\u00eb\u00a7\u012a", - "dest", - "\u0120kolej", - "\u0120temporarily", - "\u0120determining", - "\u0120Glass", - "\u00d1\u0122\u00d0\u00be\u00d0\u00bd", - "olan", - "\u0120dominated", - "\u00e5\u012e\u0138", - "____", - "\u0120\u00d9\u0129\u00d8\u00b0\u00d8\u00a7", - "\u0120Dana", - "\u0120dinheiro", - "aqu", - "\u00eb\u00af\u00bc", - "\u0120\u00c3\u0142s", - "\u0120Joey", - "\u0120Griff", - "\u0120attain", - "\u0120transitions", - "\u0120Literally", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00b4", - "\u0120Haven", - "\u0120grabbing", - "\u0120crystals", - "\u0120Fourth", - "\u0120candles", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d1\u0125\u00d1\u0129\u00d0\u00b0", - "rico", - "\u01205000", - "etto", - "\u0120undo", - "\u0120kto", - "\u0120divert", - "\u0120chir", - "\u0120persec", - "\u0120hiking", - "\u0120announcements", - "\u00e7\u0136\u00b1", - "\u00d0\u00b7\u00d1\u012d", - 
"\u0120auc", - "\u0120systemic", - "\u0120RM", - "\u00cf\u0125\u00ce\u00b1", - "\u0120\u00d0\u0136\u00d0\u00b6", - "\u0120yar", - "\u0120Ward", - "\u0120pissed", - "\u0120carn", - "\u0120autonomous", - "\u00e3\u0127\u0130\u00e3\u0127\u0130", - "sover", - "\u00e6\u00b2\u0134\u00e9\u012e\u00af", - "\u00e5\u00be\u012a\u00e5\u00a5\u00bd", - "\u0120reflex", - "\u0120gardens", - "\u0120dated", - "\u00ec\u00b1", - "ami\u00c4\u013b", - "\u0120continuity", - "\u0120citizenship", - "\u0120schwer", - "\u0120zak", - "table", - "\u0120\u00d1\u0123\u00d1\u0129", - "\u00e8\u00a7\u0123", - "\u0120\u00cf\u0125\u00ce\u00b5", - "\u0120generates", - "\u00ea\u00b5\u00ac\u00eb\u0124\u013a", - "\u00c3\u00b6h", - "\u00c3\u00b3m", - "alam", - "\u0120JUDY", - "\u0120Bug", - "\u0120\u00e3\u0123\u00a6", - "\u0120drones", - "\u0120\u00c3\u00a1gua", - "acaks", - "\u00e6\u013c", - "\u0120\u00d0\u013c\u00d0\u00be\u00d0\u00bd", - "\u00d7\u0138\u00d7\u0136", - "\u0120strive", - "\u0120Altern", - "\u0120nearest", - "\u0120proyect", - "tera", - "\u0120ASHLEY", - "\u0120worm", - "\u0120replay", - "\u0120tara", - "\u0120Indians", - "\u00e3\u0124\u00b0", - "icaid", - "\u0120\u00ec\u012a\u013e", - "\u0120appealing", - "\u0120Wes", - "\u0120mentions", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00bb\u00d0\u00b5", - "\u0120kw", - "\u0120fragile", - "isz", - "k\u00c3\u00b3w", - "hang", - "color", - "\u0120presidente", - "87", - "\u00d0\u00b5\u00d1\u0126", - "\u00e7\u012a\u00b8", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00b1\u00d0\u00b0\u00d0\u00b2", - "\u0120Nelson", - "\u00c3\u00a1fic", - "\u0120MICHAEL", - "\u0120mechanic", - "\u0120metres", - "\u0120oczywi\u00c5\u013dcie", - "\u0120Cind", - "\u0120ogs\u00c3\u00a5", - "\u0120landsca", - "ACE", - "\u0120headlines", - "\u0120catalyst", - "\u0120Catch", - "inkles", - "\u0120pills", - "ordo", - "\u0120immigrant", - "\u0120examination", - "\u0120accidents", - "z\u00c4\u0127d", - "\u0120quiere", - "\u0120nella", - "\u012067", - "\u0120passa", - "\u0120superfic", - "istor", - "\u0120nov", - "\u00eb\u012d\u00b5", - "\u0120mandate", - "isons", - "\u0120Virtual", - "\u0120selber", - "\u0120counseling", - "\u0120NBA", - "\u0120sept", - "\u0120believer", - "\u0120marvel", - "\u0120Integr", - "\u0120\u00d0\u00bc\u00d1\u0138", - "\u0120orph", - "\u0120backward", - "\u0120Generation", - "\u0120Pict", - "\u0120\u00d1\u0124\u00d0\u00be\u00d1\u0124", - "\u0120tapi", - "prochen", - "\u0120hallway", - "hte", - "\u0120\u00db\u0123\u00db\u0134", - "\u0120Zum", - "\u00e8\u0122\u0123\u00e5\u00b8\u00ab", - "achment", - "iquer", - "folg", - "\u0120Eddie", - "\u0120Kil", - "\u0120wellness", - "stock", - "\u00e8\u00bc\u0125", - "\u0120ka\u00c3\u00a7", - "\u0120terrorism", - "\u0120pointer", - "Of", - "heric", - "\u0120Ultimately", - "\u0120meses", - "\u0120Trade", - "\u0120pint", - "\u0120tuition", - "\u0120disagre", - "\u0120\u00ea\u00b2\u012e\u00ec\u0140\u0126", - "\u0120manuscript", - "\u0120roomm", - "\u0120outputs", - "\u00d0\u00b5\u00d1\u0128\u00d0\u00b8", - "\u0120ries", - "\u0120salud", - "otzdem", - "\u0120masses", - "\u0120by\u00c5\u0124a", - "\u0120clearing", - "\u0120discourse", - "atson", - "\u0120folded", - "\u0120Jar", - "\u00d9\u0126\u00d9\u012b", - "900", - "\u0120\u00d1\u0125\u00d1\u0123\u00d0\u00bf", - "\u0120prophecy", - "\u0120interfere", - "\u00d0\u00b8\u00d1\u0127\u00d0\u00be\u00d0\u00b4", - "\u00e0\u00b9\u012e", - "\u0120thri", - "\u0120\u00d7\u0140\u00d7\u00a9", - "\u0120laz\u00c4\u00b1m", - "\u01201992", - "\u0120futuro", - "\u0120locking", - "\u0120embargo", - "\u0120Neither", - 
"ivamente", - "\u0120m\u00c3\u00a5ste", - "\u0120mik", - "\u0120collector", - "\u00d0\u00b5\u00d0\u00ba\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d1\u0122", - "\u0120Gand", - "\u0120sentir", - "\u0120Might", - "\u00e5\u00a1\u0136", - "\u0120ganzen", - "UC", - "\u0120relating", - "SD", - "\u0120mosquito", - "GR", - "\u0120hollow", - "\u00e2\u013a\u0127", - "\u0120Walker", - "\u0120affiliate", - "\u0120duplicate", - "\u00d0\u00bd\u00d0\u00b5\u00d0\u00bc", - "\u0120grape", - "\u0120Organization", - "\u0120synt", - "Joe", - "\u0120geg", - "\u0120revealing", - "\u0120Ethan", - "outer", - "\u0120yay", - "\u00e9\u00ab\u0136", - "\u00d0\u00bb\u00d0\u00b0\u00d1\u0122", - "\u0120reportedly", - "\u0120ihrer", - "\u0120recognise", - "\u0120bumper", - "\u0120Randy", - "\u0120Venus", - "tles", - "\u0120appetite", - "\u0120glucose", - "\u0120chodzi", - "\u0120Furthermore", - "tir", - "\u0120conta", - "\u0120intuition", - "\u0120altitude", - "\u0120chunks", - "\u0120Joshua", - "\u00c4\u00b1\u00c4\u0141\u00c4\u00b1m", - "rylic", - "leans", - "\u0120\u00ed\u0136\u00bc\u00eb", - "LL", - "Que", - "\u0120gor", - "\u0120\u00d0\u00b7\u00d0\u00bd\u00d0\u00b0\u00d1\u0129\u00d0\u00b8\u00d1\u0124", - "\u0120poems", - "\u0120excel", - "\u0120explored", - "\u0120popul", - "\u0120incluso", - "st\u00c3\u00a4", - "\u0120Gavin", - "alling", - "\u0120\u00cf\u0126\u00ce\u00bf\u00ce\u00bd", - "\u00e9\u00a9", - "arbeit", - "\u0120Gas", - "\u0120glorious", - "rieben", - "\u0120spam", - "\u0120indoor", - "\u0120thrust", - "\u0120Ald", - "\u0120Prior", - "\u0120onboard", - "\u00e3\u0123\u0142\u00e3\u0123\u0137\u00e3\u0123\u0126", - "oca", - "ASH", - "\u00a3\u0142", - "\u0120Christine", - "\u0120drawer", - "\u0120noon", - "\u0120\u00ec\u0140\u013a\u00eb", - "\u0120permanently", - "\u00e6\u00b7\u00b1", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bc\u00d0\u00b5\u00d1\u0122", - "\u0120podcasts", - "erapeut", - "prit", - "\u0120stainless", - "\u0120\u00da\u00a9\u00db\u0134", - "\u0120familia", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b7\u00d1\u0122", - "unto", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00be\u00d0\u00bb", - "\u0120h\u00c3\u00a4", - "\u0120Hai", - "\u0120PB", - "izon", - "\u0120konnte", - "\u0120b\u00c3\u00bcy\u00c3\u00bck", - "\u0120utilizar", - "\u00da\u0128", - "\u0120aquesta", - "\u0120mixer", - "udent", - "\u00d0\u00bb\u00d0\u00b5\u00d0\u00ba\u00d1\u0123", - "\u00c5\u0124u", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d1\u0123\u00d1\u0124\u00d0\u00b5\u00d0\u00bc", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d1\u0122\u00d0\u00bc", - "\u0120fatal", - "\u0120considerations", - "\u0120validation", - "\u0120oli", - "\u0120karde\u00c5\u0141", - "\u0120GLORIA", - "\u0120pall", - "\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d0\u00b5", - "\u0120rectang", - "\u0120medieval", - "allahi", - "asti", - "\u0120Syrian", - "\u0120shear", - "\u0120debug", - "\u0120Mai", - "\u0120knocking", - "\u0120Lex", - "ardan", - "rov", - "\u0120memorial", - "\u00e6\u00b0\u00a3", - "ooky", - "\u0120stuffed", - "\u0120pass\u00c3\u00a9", - "\u0120wig", - "\u0124\u0142", - "\u0120pr\u00c3\u00b3xima", - "\u01201991", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d0\u00b6\u00d0\u00b4\u00d1\u0125", - "\u0120nuestros", - "\u0120Beast", - "\u0120smo", - "atched", - "ologia", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b4", - "\u0120gee", - "\u0120conceptual", - "\u0120\u00c3\u00b4", - "\u0120decreases", - "\u0120queries", - "\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d1\u012a", - "\u0120Apart", - "\u0120exempl", - "\u00e5\u00b1\u00b1", - "\u0120fled", 
- "\u0120OFF", - "ggak", - "\u0120bead", - "hir", - "lies", - "\u0120Clearly", - "\u00c4\u00b1lar", - "\u0120chess", - "\u0120whichever", - "\u012096", - "\u00e1\u00ba\u00b1", - "\u0120respects", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d1\u0122", - "\u0120organism", - "\u0120grandpa", - "\u0120Vie", - "\u00e8\u00b7\u0141\u00e4\u00bd\u0142", - "\u0120flooding", - "\u0120upgraded", - "\u00d1\u0133\u00d1\u0122", - "\u0120cheeks", - "\u0120conquer", - "\u0120stubborn", - "\u0120puzzles", - "\u0120auction", - "\u0120relying", - "\u0120PROF", - "\u0120Esper", - "\u0120\u00d0\u013e\u00d0\u00a3", - "\u0120hype", - "\u0120possibil", - "\u0120imprison", - "\u0120Ern", - "\u00ec\u0139\u012a\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120envie", - "\u0120resurrection", - "\u00e4\u00b8\u012f\u00e8\u00a1\u012e", - "\u0120sper", - "\u0120Venezuela", - "som", - "\u0120\u00ec\u0140\u0142\u00ea\u00b9", - "\u0120nouvelle", - "\u0120closes", - "\u01201940", - "\u0120qua", - "\u0120Jared", - "\u0120Pir", - "\u0120inde", - "\u0120scrub", - "uku", - "\u0120requiring", - "\u0120\u00d0\u00b2\u00d0\u00b0\u00d0\u00bc\u00d0\u00b8", - "\u0120considerable", - "\u00e5\u0132\u013d", - "ilia", - "\u0120inne", - "\u0120meinem", - "\u0120hardship", - "\u0120traps", - "roc", - "\u0120\u00ec\u0126\u00a4\u00eb", - "\u0120researching", - "\u0120Margaret", - "\u0120penny", - "\u0120b\u00c4\u00b1rak", - "\u00d1\u0133\u00d0\u00bb", - "\u0120wool", - "\u0120rhet", - "\u0120flatten", - "\u00e7\u0129", - "\u00e0\u00b9\u0122\u00e0\u00b8\u00a3", - "\u0120pied", - "\u0120Chap", - "\u0120underm", - "\u0120fret", - "\u0120crashed", - "\u0120Frauen", - "\u00d8\u00b0\u00d9\u0129", - "ivan", - "\u0120literary", - "latego", - "\u0120sp\u00c3\u00a4ter", - "\u0120similarities", - "\u00e2\u0128", - "\u0120Coron", - "\u0120Creek", - "\u0120bosses", - "\u0120accompanied", - "\u0120debates", - "\u0120assembled", - "\u0120\u00c3\u0123", - "\u0120Vai", - "\u0120tract", - "\u0120simplement", - "\u0120Arin", - "\u0120vulnerability", - "\u0120hormone", - "IEL", - "OOK", - "\u0120relay", - "\u0120Andrea", - "ril", - "\u0120necessity", - "aceutical", - "\u00d1\u0130\u00d1\u012b", - "ousing", - "nahmen", - "\u0120footprint", - "map", - "\u0120Tier", - "annya", - "intend", - "\u00e5\u0138\u00ae", - "\u00e5\u00a2", - "\u0120decorate", - "\u0120zombies", - "\u0120Hyd", - "\u0120Suz", - "\u0120campuses", - "\u0120Emb", - "\u0120throttle", - "\u0120admin", - "\u0120oportun", - "\u0120mirrors", - "\u0120identities", - "\u0120Clin", - "\u0120\u00eb\u00b9\u0126\u00eb", - "\u00e1\u00b9\u00a3", - "\u0120Ott", - "\u0120blues", - "\u0120impressions", - "-,", - "\u0120vague", - "afe", - "\u0120inferior", - "erald", - "\u0120medicines", - "\u0120pregunta", - "osely", - "\u0120t\u00c3\u00a9l\u00c3\u00a9", - "\u0120Month", - "\u0120Leaders", - "\u0120Egyptian", - "\u0120ration", - "kers", - "heits", - "\u0120recht", - "Play", - "\u0120eg", - "\u0120polls", - "\u0120WOODR", - "\u0120slots", - "jam", - "Both", - "\u0120Rat", - "\u00d1\u0122\u00d0\u00b0\u00d0\u00b6", - "\u0120Bright", - "\u00e4\u00b8\u0122\u00e5\u00ae\u013c", - "\u00e1\u00bb\u0133i", - "urious", - "\u0120singers", - "\u0120login", - "\u0120t\u00c3\u00aam", - "lation", - "\u0120Mum", - "\u00c6\u00b0\u00e1\u00bb\u013fng", - "\u0120Editor", - "\u00e5\u0132\u0133", - "\u0120innovations", - "have", - "\u0120Sek", - "\u0120weaker", - "\u0120Gob", - "After", - "\u00b4\u00ec\u00a7\u0122", - "\u0120\u00eb\u00ac\u00b8\u00ec\u0142\u013e", - "\u00e3\u0125\u00bc\u00e3\u0125\u00bc", - "\u0120disadvantage", 
- "\u00e7\u00a2\u00ba", - "\u0120gaze", - "\u0120Mack", - "\u00cf\u0123\u00ce\u00af", - "\u0120Kiss", - "\u0120Holo", - "\u0120Birth", - "izi", - "bab", - "\u00e4\u00bf\u013f", - "\u00ec\u012d\u013e\u00ea\u00b3\u0142", - "\u00d0\u00b4\u00d0\u00b5\u00d1\u0122\u00d0\u00b6", - "\u0120squat", - "\u00d0\u00ba\u00d1\u0125\u00d1\u0123", - "uni", - "\u0120Comme", - "\u0120WOODRUFF", - "\u0120Championship", - "\u0120welche", - "\u0120Youth", - "zem", - "\u0120odpow", - "\u0120persistent", - "rut", - "\u00ec\u0136\u00a9", - "\u00ed\u0138\u00a5", - "lair", - "iku", - "\u0120vendor", - "\u0120ch\u00c3\u00bang", - "\u0120financi", - "\u0120overly", - "\u00c3\u00a2u", - "\u0120gluten", - "\u01201800", - "\u0120divisions", - "\u0120ciudad", - "\u0120obed", - "\u0120warum", - "\u0120eher", - "\u0120elim", - "\u0120\u00d0\u0134\u00d0\u00be", - "\u0120peuvent", - "\u0120Wanna", - "\u0120attendance", - "\u0120assessments", - "\u0120Bog", - "\u0120imagery", - "\u0120collectively", - "\u0120informal", - "\u0120Schwe", - "\u0120deutlich", - "\u0120Chel", - "\u0120PE", - "owed", - "\u0120banner", - "\u0120shelves", - "\u0120Return", - "\u00e6\u012d\u00bf", - "LAUGHS", - "\u0120congratulate", - "\u0120Norway", - "\u0120dwell", - "\u0120Caribbean", - "\u0120norms", - "\u0120Animal", - "\u0120Valentine", - "\u0120extending", - "\u0120Vou", - "orr", - "\u0120Cheng", - "\u00c2\u00a1", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d0\u00b3", - "\u0120veg", - "\u0120h\u00c3\u00a5", - "\u0120Xin", - "\u0120\u00ec\u00b9\u00b4\u00eb", - "emet", - "\u0120hypoth", - "\u0120interessante", - "rices", - "IZ", - "\u0120USD", - "\u0120runner", - "\u0120Bag", - "\u0120\u00ea\u00bd", - "\u0120come\u00c3\u00a7ar", - "\u0120pigs", - "\u0120weaknesses", - "Ph", - "\u0120Viol", - "\u00e4\u00b8\u012f\u00e7\u0136\u00a8", - "\u0120dragging", - "\u0120Aqu\u00c3\u0143", - "\u0120CSS", - "\u0120millimeters", - "\u0120est\u00c3\u00a1s", - "\u0120acute", - "\u0120dejar", - "i\u00c4\u0141", - "obra", - "Love", - "\u0120silk", - "****", - "\u0120joins", - "\u0120prol", - "\u0120\u00ea\u00b0\u0132\u00ec\u0124\u00ac\u00ed\u0137\u00a9\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u00e6\u0136\u00af", - "\u00d8\u0143\u00d8\u00af", - "aghetti", - "\u00c3\u00a4nner", - "\u0120strang", - "\u0120doubled", - "\u0120descriptions", - "\u0120stellen", - "\u0120parti", - "\u00e7\u00ab\u012d", - "\u00b2\u0126\u00eb", - "\u0120\u00c3\u00b6\u00c4\u0141", - "ighing", - "\u0120angular", - "\u0120natuur", - "\u0120Shel", - "\u00c6\u00b0\u00c6\u00a1", - "\u0120rays", - "\u0120seper", - "start", - "vised", - "\u0120rushed", - "\u0120internationally", - "\u0120nivel", - "\u0120boxing", - "fallen", - "\u00e1\u00bb\u0133c", - "\u0120seinen", - "plicity", - "\u0120carboh", - "\u0120Travis", - "uso", - "\u0120Phase", - "\u0120activation", - "\u0120opio", - "\u00b7\u00a8", - "\u0120decreased", - "Car", - "\u0120bundle", - "\u0120expend", - "ormal", - "\u0120adjacent", - "\u0120mee", - "\u0120\u00d0\u00be\u00d1\u0122\u00d0\u00b3", - "\u0120transcript", - "\u0120Language", - "GS", - "\u00e8\u00a7\u012b", - "\u0120seul", - "\u00c3\u0142nh", - "\u0120nya", - "nings", - "\u0120\u00ec\u012d\u013e\u00eb", - "\u0120\u00eb\u0136\u00b0\u00eb\u013f\u00bc", - "\u0120Agr", - "\u00c3\u0143d", - "\u00e7\u0137\u013b", - "\u0120aby", - "\u0120Neo", - "\u00c4\u00b1yoruz", - "\u0120Thinking", - "aime", - "\u0120vite", - "\u0120trav\u00c3\u00a9s", - "\u0120\u00d7\u0133\u00d7\u00a2", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d0\u00b4", - "Our", - "hoot", - "\u0120liner", - 
"\u0120Pizza", - "\u0120hyg", - "flies", - "\u0120Continue", - "\u0120dental", - "\u0120Tib", - "\u0120regulate", - "lie\u00c3\u0141", - "ALK", - "\u0120Tae", - "\u00ea\u00b8\u00b8", - "\u0120Brexit", - "\u0120Gut", - "\u0120occupation", - "\u0120zrobi", - "\u00c3\u00a2m", - "\u0120whisk", - "\u00e4\u00b8\u0138\u00e7\u0137\u012e", - "\u0120kanske", - "omon", - "robe", - "\u0120warfare", - "\u0120th\u00e1\u00bb\u0125", - "\u0120jaki", - "\u0120strokes", - "\u0120peas", - "\u0120Damit", - "HAN", - "\u0120interference", - "\u0120\u00d0\u00bc\u00d0\u00b8\u00d0\u00bd\u00d1\u0125\u00d1\u0124", - "NER", - "outing", - "\u0120textures", - "\u0141\u012b", - "owi", - "\u0120\u00ed\u0137\u013b", - "\u0120dens", - "\u0120protagonist", - "\u00c3\u00a4nn", - "\u0120goddess", - "\u0120wollte", - "ijo", - "\u0120Woche", - "\u0120VPN", - "story", - "\u0120kinderg", - "\u0120funnel", - "\u0120distress", - "\u00d0\u00bd\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d1\u012e\u00d1\u0130", - "\u0120noisy", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b4\u00d0\u00be\u00d0\u00bb\u00d0\u00b6", - "\u0120daran", - "\u0120enzyme", - "\u00d0\u00bb\u00d0\u00be\u00d0\u00b6", - "\u0120mute", - "\u0120dwar", - "\u0120\u00d8\u00a7\u00d8\u00b3", - "\u0120kompl", - "\u0120merit", - "\u0120fosse", - "\u0120Drink", - "\u0120fora", - "\u0120wohl", - "\u0120breeze", - "\u0120sanit", - "\u0120drin", - "\u0120\u00ec\u013f\u00b4\u00ea\u00b1\u00b0\u00eb\u012c\u0136", - "\u012062", - "\u0120\u00ec\u00b0\u00a8\u00eb", - "abytes", - "\u0120deeds", - "\u0120\u00d0\u00b9", - "i\u00c3\u00a8me", - "iggling", - "\u0120\"'", - "\u0120\u00d1\u0129\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d1\u012e", - "\u0120Answer", - "\u0120evangel", - "\u01201080", - "\u0120Visit", - "icient", - "\u0120reliability", - "\u00d1\u0130\u00d1\u0123\u00d1\u012e", - "\u0120Earlier", - "\u0120fid", - "\u00e7\u0143\u012b\u00e4\u00b8\u0122\u00e4\u00b8\u012d", - "\u0120sleeves", - "iyorsun", - "\u0120bib", - "\u0120Account", - "\u00d1\u0131\u00d0\u00bb\u00d0\u00b8", - "ciplinary", - "zas", - "\u0120\u00d0\u00b1\u00d0\u00b5\u00d1\u0122", - "\u0120necklace", - "\u0120blender", - "\u0120Phillips", - "eti", - "\u0120Jupiter", - "\u0120provoc", - "\u0120Years", - "entre", - "acio", - "\u0120k\u00c3\u00bc", - "\u0120antenna", - "\u0120novels", - "\u0120fart", - "\u0120Sugar", - "\u0120Judy", - "\u0120collapsed", - "\u00e7\u00b0", - "ritis", - "\u0120\u00ec\u0125\u0123\u00ed\u013b\u00a9", - "\u00d0\u0139\u00d0\u00ab", - "\u0120Verf", - "ranean", - "ereum", - "\u0120Target", - "\u012088", - "\u0120\u00d0\u013a\u00d0\u00b7", - "ideo", - "\u0120regression", - "\u00ec\u00b6\u013e", - "\u0120m\u00c3\u00b3wi", - "\u0120studios", - "iens", - "iph", - "\u0120frying", - "\u0120fascinated", - "\u0120Wah", - "bucks", - "maya", - "\u0120Saturn", - "\u0120Mommy", - "\u0120ratings", - "\u0120autumn", - "\u00c6\u00b0\u00c6\u00a1ng", - "\u0120loser", - "\u0120centro", - "\u00c3\u00a9rieur", - "\u0120Fold", - "\u0120supervisor", - "\u0120Nobel", - "\u0120underest", - "obia", - "\u0120\u00d0\u00b2\u00d1\u0123\u00d1\u0131", - "\u0120verw", - "\u0120fuels", - "\u0120artifacts", - "\u0120\u00eb\u00b6\u013b", - "\u0120Autom", - "\u00e7\u013c\u0126\u00e6\u013a\u00af", - "\u00db\u0136", - "\u00d7\u0137\u00d7\u00a1", - "\u0120ihnen", - "\u012059", - "ounding", - "\u00d0\u00b5\u00d1\u0122\u00d1\u012d", - "inars", - "chant", - "\u0120addicted", - "\u0120explosive", - "\u0120dispers", - "\u00e2\u0138\u012a", - "axis", - "ARY", - "\u0120lum", - "\u0120\u00d1\u0125\u00d1\u0123\u00d0\u00bb", - 
"\u0120\u00d8\u012e", - "\u0120rupees", - "\u0120Pearl", - "camp", - "tv", - "oya", - "\u0120concludes", - "\u0120collision", - "\u0120buyer", - "\u0120playground", - "\u0120springs", - "\u0120feminine", - "\u0120Ras", - "\u0120incarcer", - "\u00ed\u0139\u013a", - "\u0120dialect", - "\u0120closure", - "\u0120chatting", - "\u0120babe", - "\u0120spotlight", - "\u0120notation", - "\u00e8\u00b7\u00af", - "Star", - "i\u00c3\u00a3o", - "\u0120t\u00c3\u00aate", - "\u0120tide", - "\u0120junto", - "\u0120senator", - "\u00d0\u00a5", - "\u0120excuses", - "\u0120blink", - "\u0120admission", - "\u0120Lily", - "\u00d1\u012d\u00d0\u00bc\u00d0\u00b8", - "\u0120amigo", - "\u0120lust", - "\u00eb\u012d\u00ac", - "\u0120amino", - "\u00e4\u00ba\u012d\u00e6\u0125\u0127", - "\u0120consultant", - "\u0120Electric", - "\u0120\u00eb\u0127\u00b8\u00eb\u0140\u013a", - "ujah", - "\u0120shooter", - "ichten", - "\u0120Ukrainian", - "\u0120aims", - "\u0120Entertain", - "\u0120miracles", - "\u00e8\u0143\u00b0", - "\u0120zeigen", - "\u0120lam", - "\u0120ress", - "\u0120Jill", - "ylan", - "\u0120rook", - "\u0120haya", - "\u0120passport", - "adata", - "\u0120juicy", - "conf", - "\u00d0\u00bb\u00d0\u00b5\u00d0\u00b9", - "\u0120Sz", - "\u0120intercept", - "\u00e3\u0123\u0124\u00e3\u0124\u012c\u00e3\u0123\u012e\u00e3\u0123\u00a8\u00e3\u0123\u0128\u00e3\u0123\u0136\u00e3\u0123\u0138", - "\u0120Teams", - "\u0120maken", - "irrel", - "\u0120LIKE", - "\u00e1\u00ba\u0143y", - "\u00ea\u00b5\u00b0", - "\u0120shortage", - "\u0120paradigm", - "\u0120papel", - "\u0120astero", - "\u00e3\u0123\u00be\u00e3\u0123\u0141", - "\u0120sollen", - "\u0120Mickey", - "\u0120Orleans", - "\u0120cholesterol", - "\u0120goose", - "\u00d1\u0128\u00d0\u00b8\u00d1\u0130", - "\u00e3\u0123\u0124\u00e3\u0124\u012d", - "\u0120FL", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d0\u00bb\u00d0\u00be\u00d0\u00b2", - "\u0120tribute", - "\u0120Gam", - "\u0120\u00c3\u00a9videmment", - "\u00d1\u0131\u00d1\u0127", - "\u00e5\u00ae\u0140", - "\u00e7\u0136\u00b0", - "\u0120inappropri", - "uhan", - "\u0120organizational", - "ailed", - "\u0120endure", - "\u012076", - "\u0120shotgun", - "\u0120livre", - "\u0120suited", - "\u0120warmth", - "\u0120SIM", - "\u0120envision", - "\u0120degrad", - "\u00c3\u00aene", - "Laughing", - "\u0120Whoever", - "\u0120Buddhism", - "\u0120sprinkle", - "ce\u00c4\u0141iz", - "\u0120ruins", - "\u0120starch", - "\u0120Herz", - "\u0120injustice", - "\u0120humidity", - "\u00d0\u00be\u00d0\u00b6\u00d0\u00b0\u00d0\u00bb\u00d1\u0125\u00d0\u00b9", - "\u0120Object", - "\u0120Ign", - "\u0120Exam", - "igers", - "\u0120thou", - "\u0120Soy", - "ivas", - "\u0120poles", - "math", - "\u0120\u00d0\u00b2\u00d0\u00bd\u00d0\u00b8\u00d0\u00bc", - "INGING", - "edral", - "\u0120explor", - "\u0120roasted", - "\u0120crawl", - "\u0120coff", - "\u0120anom", - "\u0120wij", - "\u0120improves", - "\u0120treaty", - "\u0120discovering", - "\u0120statute", - "\u0120mercado", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d0\u00bb", - "\u0120intel", - "\u0120Chancellor", - "\u0120Medicaid", - "ugi", - "\u0120verbal", - "\u0120d\u00c3\u00b6n", - "\u0120scripture", - "\u0120iteration", - "eks", - "\u0120Oxford", - "\u0120w\u00c3\u00a4h", - "\u0120Vad", - "\u0120AK", - "\u0120\u00ec\u0137\u0126\u00ec\u013f\u00b4\u00eb", - "\u0120iets", - "\u0120needles", - "\u00d9\u0125\u00d9\u0127", - "\u0120pasado", - "\u0120albums", - "\u0120yea", - "etzen", - "\u0126\u00eb\u0131\u0126", - "\u0120determines", - "\u0120thee", - "\u0120Playing", - "\u00c3\u00a4rt", - "\u0120\u00d7\u00a6", - "cled", - "\u0120downward", - 
"alone", - "\u0120solu", - "\u0120partition", - "\u0120wz", - "dd", - "\u0120pessoal", - "\u00e5\u00aa\u00bd", - "\u0120factories", - "\u0120bleibt", - "\u00e0\u00b8\u00a1\u00e0\u00b8\u00b2", - "alsa", - "\u0120NFL", - "\u0120fuera", - "\u0120reserved", - "\u0120Earn", - "\u0120helt", - "\u0120shortcut", - "\u0120convincing", - "space", - "\u0120enforce", - "\u0120cores", - "\u0120efter", - "\u0120recession", - "xico", - "\u0120proposition", - "arians", - "ropol", - "\u0120\u00eb\u00aa\u00b0\u00eb", - "\u0120\u00ce\u013e", - "\u0120\u00ec\u013c\u0136\u00ec\u00a6\u013a", - "\u0120activist", - "\u0120conviction", - "\u0120zab", - "\u0120canceled", - "\u00d1\u0124\u00d0\u00be\u00d1\u0129\u00d0\u00bd\u00d0\u00be", - "\u0120\u00ce\u00ae", - "\u00e9\u0122\u013b\u00e6\u00a8\u00a3\u00e5\u0143\u0132", - "nite", - "\u0120fundra", - "buzzer", - "\u00d0\u00b5\u00d0\u00bb\u00d0\u00be", - "ications", - "\u0120zona", - "\u0120teens", - "\u0120methodology", - "\u0120\u00ec\u00a4\u0133\u00ec\u013c\u0136", - "than", - "\u0120Ul", - "\u0120Grey", - "\u0120hog", - "INK", - "\u0120Sung", - "\u0120Claud", - "\u0120CNN", - "\u0120delivers", - "alin", - "\u0120Adobe", - "othe", - "\u0120Deswegen", - "\u00e0\u00b8\u00b3", - "\u0120werde", - "\u0120grease", - "\u0120upgrades", - "\u0120Finland", - "accept", - "\u0120interrog", - "bee", - "\u0120\u00e3\u0123\u00ab", - "\u0120prede", - "\u0120Nep", - "\u0120Cambridge", - "\u0120graphs", - "\u0120haunted", - "\u00d1\u0123\u00d0\u00b5\u00d0\u00bc", - "\u00e6\u00a7", - "\u00e5\u0127\u012d", - "Some", - "\u0120Mall", - "\u0120rehearsal", - "\u0120Urban", - "\u0120Lag", - "\u0120nim", - "\u00ea\u00b0\u0137", - "\u0120positioned", - "\u0120avoided", - "EMA", - "\u0120llegar", - "\u0120r\u00c3\u00a1pido", - "\u0120gouvern", - "\u0120hing", - "\u0120dealer", - "\u0120reforms", - "\u0120fatty", - "\u00d0\u00ba\u00d0\u00be\u00d0\u00bb", - "\u0120Ace", - "\u0120nep", - "\u0120\u00ec\u00b2\u0143", - "\u0120computation", - "\u0120Stream", - "bourne", - "tur", - "Por", - "\u0120sleepy", - "\u0120banget", - "\u00e3\u0123\u0124\u00e3\u0123\u00ae", - "\u0120weighs", - "\u0120bleiben", - "\u0120Gren", - "\u0120unions", - "\u0120\u00ea\u00b5\u0132", - "\u0120aprender", - "uitar", - "\u0120Jest", - "uming", - "\u0120Player", - "\u0120Extrem", - "\u0120integer", - "\u00d0\u00b0\u00d1\u0129\u00d0\u00b5", - "\u0120concerts", - "\u00d7\u0137\u00d7\u013d", - "\u0120troch\u00c4\u013b", - "\u0120Repe", - "\u00e9\u0129\u012f\u00e8\u00a6\u0123", - "\u00e0\u00b9\u0124", - "\u00c5\u00bcen", - "\u0120sounding", - "\u0120anonymous", - "\u0120exca", - "\u0120Iranian", - "\u0120energetic", - "\u0120wives", - "\u0120\u00d1\u0128\u00d0\u00b2\u00d0\u00b5\u00d1\u0124", - "\u0120ais", - "\u00e3\u0123\u012d\u00e3\u0123\u00aa", - "\u0120sudah", - "\u0120underwear", - "\u0120crunchy", - "\u0120Pain", - "\u0120ger\u00c3\u00a7ek", - "redict", - "\u0120misma", - "\u00d1\u0138\u00d1\u0124", - "\u0120surviving", - "\u00ce\u0143\u00cf\u0124", - "\u0120participant", - "\u0120Hessen", - "\u00c3\u00a1rias", - "\u0120subway", - "ist\u00c3\u00a4", - "\u0120coral", - "\u0120marijuana", - "\u0120Memorial", - "\u00d1\u012a\u00d0\u00b8\u00d0\u00b9", - "riz", - "\u0120satellites", - "\u0120lease", - "\u0120Cameron", - "umph", - "\u0120classmates", - "\u00c3\u00a4h\u00c3\u00a4n", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b2\u00d0\u00b5", - "\u0120hue", - "\u0135\u00a4\u00ec\u013f\u0126", - "\u0120proportional", - "\u0120noss", - "\u0120laps", - "r\u00c3\u00a5", - "\u0120bitcoin", - 
"\u00d0\u0139\u00d0\u00ab\u00d0\u013c\u00d0\u0132", - "\u0120\u00ec\u00b6\u00a9", - "\u0120\u00d9\u0126\u00d9\u0126", - "\u0120Mort", - "\u0120Esp", - "arnos", - "\u0120\u00d1\u0123\u00d0\u00ba\u00d0\u00b0\u00d0\u00b7\u00d0\u00b0\u00d0\u00bb", - "\u0120\u00c3\u00a4nd", - "\u00e5\u0127\u0126", - "\u00d7\u013b\u00d7\u013b\u00d7\u013f", - "\u0120Geb", - "gehen", - "Inaudible", - "borough", - "\u00d1\u0126\u00d1\u0126", - "\u0120fellowship", - "\u0120Paper", - "\u0120curved", - "\u0120GEOR", - "\u0120calculator", - "\u0120Catal", - "\u0120v\u00c3\u0142o", - "\u0120bypass", - "\u00d0\u00bb\u00d0\u00b5\u00d1\u0124", - "\u00e0\u00b3", - "trans", - "rencies", - "\u00ec\u00a1\u012e", - "igent", - "\u0120tasted", - "\u0120oceans", - "uft", - "ervice", - "\u0120\u00d0\u013e\u00d0\u00a3\u00d0\u0139\u00d0\u00ab\u00d0\u013c\u00d0\u0132", - "\u0120Classic", - "\u0120respectively", - "~)", - "\u00c3\u00aetre", - "\u0120Nash", - "\u0120zit", - "\u0120\u00ec\u013d\u0125", - "\u0120\u00eb\u0128\u0134", - "quote", - "\u0120Uns", - "\u0120tac", - "\u0120proves", - "\u0120Portland", - "bly", - "\u0120ere", - "\u00ec\u00b6\u0136", - "\u0120\u00c3\u00a9poca", - "\u0120\u00d1\u0124\u00d1\u012d\u00d1\u0123\u00d1\u0131\u00d1\u0129", - "76", - "\u0120hade", - "\u0120Fro", - "\u0120pol\u00c3\u0143tica", - "tag", - "\u0120\u00ed\u0137\u0143", - "\u0120sch\u00c3\u00b6", - "arett", - "\u0120provisions", - "\u0120motors", - "\u0120imaging", - "\u0120dok", - "ulously", - "\u0120meille", - "\u00e7\u0130\u00b0\u00e5\u013e\u00a8", - "\u00eb\u0132", - "\u0120ISO", - "\u0120STEM", - "\u0120Bowl", - "\u0120towers", - "\u0120Ee", - "\u0120Performance", - "\u0120loin", - "cussion", - "\u0120coastal", - "iale", - "compass", - "\u0120spells", - "\u0120disappointing", - "\u0120\u00eb\u00b2\u012a\u00ec\u00a7\u00b8", - "EER", - "\u0120versatile", - "asury", - "\u0120enfin", - "\u0120downside", - "\u0120guiding", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d9\u0124", - "\u0120ninety", - "charged", - "\u0120Fans", - "\u0120philosophical", - "\u0120garn", - "\u0120m\u00c3\u00a5nga", - "\u0120willingness", - "\u0120portions", - "aben", - "\u0120\u00ef", - "\u00c2\u00bf", - "raul", - "\u0120sprint", - "ifen", - "\u00c4\u00b1yla", - "\u0120\u00d0\u00ba\u00d1\u0125\u00d0\u00bf", - "\u00e3\u0123\u0131\u00e3\u0123\u0142\u00e3\u0123\u0137\u00e3\u0123\u0126", - "\u0120ensuite", - "\u0120Capitol", - "\u012063", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d0\u00b2\u00d0\u00be\u00d1\u0122\u00d0\u00b8\u00d1\u0124", - "\u0120appointments", - "\u00e6\u012b\u00be", - "omiast", - "\u0120careg", - "\u0120publisher", - "\u0120heraus", - "\u0120\u00ce\u00b5\u00ce\u00af", - "\u0120VS", - "\u00e3\u0123\u013f\u00e3\u0123\u0139\u00e3\u0123\u00a6", - "\u00e4\u00b8\u0143\u00e5\u0127\u00b1", - "\u0120sacrifices", - "third", - "\u0120humanitarian", - "\u0120\u00eb\u0124\u00b4\u00ec", - "imon", - "\u0120inequ", - "\u0120zob", - "\u0120comfortably", - "\u0120Dinge", - "\u0120cancelled", - "\u0120PSAKI", - "\u0120Robinson", - "\u0120fins", - ")?", - "\u0120Histor", - "\u0120\u00d1\u0129\u00d0\u00b5\u00d0\u00bb\u00d0\u00be\u00d0\u00b2\u00d0\u00b5\u00d0\u00ba\u00d0\u00b0", - "\u0120tbsp", - "text", - "kim", - "\u0120updating", - "\u0120geld", - "feld", - "\u0131\u00bc", - "\u0120m\u00c3\u00a4", - "\u0120caf\u00c3\u00a9", - "\u00d6\u0122", - "\u0120Sri", - "\u0120Region", - "\u0120Hahaha", - "\u0120finances", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00b4", - "\u0120bunk", - "ruk", - "haft", - "\u0120lateral", - "\u0120extensions", - "\u0120\u00ec\u0137\u0126\u00ec\u013f\u00b4", - 
"\u0120definite", - "\u0120Zhao", - "\u0120Luis", - "sty", - "\u0120casos", - "\u0120Klim", - "\u01201993", - "\u0120realization", - "\u0120historian", - "\u0120cracked", - "\u00eb\u0124\u00b4", - "\u0120syst\u00c3\u00a8me", - "\u0120CIA", - "\u0120\u00d1\u0124\u00d0\u00b2\u00d0\u00be", - "ospheric", - "\u0120flee", - "\u0120r\u00e1\u00ba\u00a5t", - "\u0120Regardless", - "\u0120reluct", - "\u0120timely", - "\u0120Julian", - "GM", - "\u00e9\u0134", - "adura", - "\u00e9\u00a3\u0141", - "\u0120dresses", - "\u00e7\u0123\u00a3", - "\u0120\u00eb\u0136\u0136", - "\u0120nominated", - "\u0120advocates", - "ymph", - "\u0120recordings", - "\u0120deviation", - "\u0120prioritize", - "\u0120spiral", - "\u0120YOUR", - "\u0120transpose", - "ampoo", - "\u0120\u00ec\u013d\u0132\u00eb\u0140\u013a", - "\u0120Vision", - "\u0120polite", - "\u0120hamb", - "\u0120Patient", - "\u00e6\u00af\u0136\u00e8\u00bc\u0125", - "\u00ed\u0123\u00ac\u00eb", - "\u0120sia", - "\u0120\u00ea\u00b3\u00b3", - "\u0120\u00c5\u00bee", - "\u00e8\u00a7\u0122", - "\u0120supermarket", - "\u00eb\u00b9", - "\u0120Sierra", - "\u0120grilled", - "\u0120Upon", - "\u0120absent", - "\u0120mec", - "\u0120Apollo", - "\u0120punk", - "\u0120Pa\u00c5\u0126st", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d0\u00be\u00d0\u00b9", - "\u0120\u00ea\u00b1\u00b0\u00ea\u00b8\u00b0", - "Girl", - "\u0120skinny", - "\u0120Premier", - "\u0120territories", - "\u0120liability", - "\u0120jerk", - "ratic", - "\u0120dancers", - "\u0120\u00d1\u0125\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "\u0120\u00ea\u00b4\u0122\u00eb", - "only", - "\u0120Stu", - "\u0120skeleton", - "\u0120\u00eb\u0143\u0132\u00eb", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00ba\u00d0\u00be\u00d0\u00bd", - "\u00c4\u00b1kt", - "\u0120MIKE", - "\u0120l\u00c3\u00b6", - "mie", - "\u0120reiter", - "\u00e3\u0123\u0135\u00e3\u0124\u012e\u00e3\u0123\u00af", - "\u0120Kolleg", - "\u0120Adams", - "licher", - "\u0120\u00c3\u00a7ocuk", - "\u00d1\u0131\u00d0\u00b3", - "\u0120blush", - "\u0120sunshine", - "\u0120ez", - "\u0120Devil", - "\u0120\u00ea\u00b8\u00b8", - "\u0120\u00e3\u0123\u012c", - "add", - "\u0120licensed", - "\u0120vinyl", - "\u0120Czech", - "imag", - "\u0120cracking", - "\u0120\u00ec\u00ba", - "\u0120udah", - "\u0120sommes", - "\u0120\u00ec\u0138\u00bc\u00ea\u00b5", - "wa\u00c4\u0129", - "\u0120fres", - "\u00e5\u0133\u00bd", - "\u0120Walmart", - "\u0120\u00d0\u00a2\u00d0\u00b5\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d1\u012e", - "atisf", - "CI", - "lang", - "\u0120diffusion", - "\u00e7\u0136\u00b7", - "\u0120somos", - "\u0120Makes", - "\u00e6\u012a\u0133\u00e6\u0125\u00b3", - "\u0120Ricky", - "\u0120mucha", - "\u00ed\u0137\u00a8", - "\u0120horsepower", - "asia", - "\u0120fibers", - "\u0120erm", - "\u00d1\u0123\u00d0\u00ba\u00d0\u00b8\u00d0\u00b5", - "\u0120jeste", - "\u0120firefight", - "\u0120cuisine", - "\u0120besonders", - "dig", - "\u0120\u00ec\u00a2\u0127", - "\u0120\u00d1\u0125\u00d0\u00b6", - "\u0120tracing", - "\u0120certains", - "\u0120Apply", - "\u00d1\u012d\u00d0\u00b2\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "\u00e7\u012e", - "\u0120bru", - "\u0120YES", - "\u0120Bai", - "\u0120Dit", - "\u0120Bis", - "\u0120unle", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d1\u0124\u00d0\u00be\u00d1\u0129\u00d0\u00bd\u00d0\u00be", - "\u0120Awak", - "..\"", - "\u0120125", - "\u0120rooted", - "\u0120cautious", - "const", - "\u0120orchestra", - "\u00e7\u013e\u00bc", - "\u0120\u00d0\u00b2\u00d0\u00bd\u00d1\u0125\u00d1\u0124", - "\u0120quelqu", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00b2\u00d0\u00b5\u00d1\u0124", - 
"\u0120Method", - "\u00ec\u00b9\u013e", - "\u0120\u00ce\u00bc\u00ce\u00b1\u00cf\u0124", - "l\u00c3\u00bc", - "\u0120\u00ec\u0137\u0126\u00ea\u00b9\u012e", - "\u0120naming", - "Char", - "\u0120Sicher", - "\u0120privileged", - "\u0120Fly", - "\u0120\u00e3\u0123\u012d", - "\u00e1\u00ba\u0143t", - "\u0120advances", - "\u0120Zelda", - "\u0120andra", - "\u0120grinding", - "\u0120Edition", - "pf", - "\u0120warriors", - "\u0120hedge", - "\u0120unseren", - "\u0120\u00d1\u0123\u00d1\u0130\u00d0\u00b4\u00d0\u00b0", - "eliness", - "\u0120personalities", - "\u0120f\u00c3\u00b6", - "'M", - "\u0120\u00d1\u0124\u00d0\u00be\u00d1\u0129\u00d0\u00bd\u00d0\u00be", - "\u0120shipped", - "\u0120meteor", - "\u0120surroundings", - "\u0120Fill", - "uesta", - "\u0120Personal", - "\u0120Alle", - "ORT", - "\u00e4\u00b9\u0127", - "\u0120Sche", - "VI", - "\u0120comparable", - "damn", - "\u0120ditch", - "YAN", - "ismus", - "\u0120pickup", - "\u0120dak", - "\u0120EP", - "best", - "\u0120Sue", - "\u00c3\u00a4llt", - "\u0120popcorn", - "\u0120folding", - "home", - "\u00d0\u00b8\u00d0\u00b2\u00d0\u00b0\u00d0\u00b5\u00d1\u0124", - "\u00e5\u00b7\u00b2\u00e7\u00b6\u0135", - "\u0120annot", - "chuck", - "\u0120fierce", - "\u0120damaging", - "\u0120flop", - "\u0120pasar", - "\u0120reef", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d0\u00be\u00d0\u00b5\u00d0\u00b9", - "\u0120zoo", - "overs", - "jets", - "\u0120pr\u00c3\u00a8s", - "\u0120Silicon", - "teok", - "\u0120Seth", - "atamente", - "\u0120transmitted", - "\u0120replicate", - "\u0120slim", - "\u0120Cream", - "\u00e6\u0126\u0141\u00e3\u0123\u013a", - "\u0120sidewalk", - "\u00ec\u012a\u013a\u00eb", - "\u0120\u00d0\u00b6\u00d0\u00b8\u00d0\u00b7\u00d0\u00bd\u00d1\u012e", - "\u0120Monica", - "\u00e4\u00be\u0128\u00e4\u00ba\u0128", - "\u0120copied", - "\u0120Terra", - "istent", - "\u00e7\u00b3\u00bb", - "\u0120\u00d0\u00be\u00d0\u00bd\u00d0\u00be", - "\u0120whale", - "\u0120WITH", - "\u00d0\u00bb\u00d1\u0125\u00d1\u012a", - "\u00e5\u00bd\u00b1\u00e7\u012b\u0129", - "\u0120Een", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d0\u00be\u00d0\u00b8", - "\u0120ordin", - "\u0120plural", - "\u0120spokes", - "\u0120dispute", - "\u0120sensible", - "\u0120preaching", - "\u0120kt\u00c3\u00b3rzy", - "pted", - "avier", - "\u0120pistol", - "\u0120Tapi", - "\u0120\u00c5\u0124", - "ffff", - "\u0120acrylic", - "\u0120ignorance", - "\u0120Ziel", - "rans", - "\u0120welding", - "mid", - "\u00e6\u012a\u0133\u00e4\u00b8\u012f", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00bc", - "\u0120lanes", - "\u0120mines", - "\u0120moms", - "\u00d7\u0137\u00d7\u0139", - "\u0120Chamber", - "tier", - "\u0120modest", - "\u0120\u00ec\u0139\u00ac\u00ea\u00b8\u00b0\u00ec\u0126\u013e", - "\u0120unas", - "\u0120wrench", - "handed", - "\u0120saturated", - "\u0120Fang", - "\u0120Commissioner", - "\u00e0\u00a4\u00b0", - "\u0120\u00d7\u0138", - "\u0120Louisiana", - "\u0120Mask", - "\u0120cubes", - "\u00ec\u0136\u00a8", - "\u0120vid\u00c3\u00a9os", - "\u0120n\u00c3\u00a5gon", - "\u0120rider", - "\u0120\u00ec\u00b6\u013e", - "\u0120s\u00c3\u00b3n", - "\u0120Latino", - "bank", - "\u00ed\u0137\u00b4\u00ec\u00a3\u00bc", - "\u0120Brend", - "\u0120sexuality", - "...,", - "\u0120forgetting", - "\u0120\u00db\u012e", - "\u0120Avengers", - "\u0120Bonjour", - "cessor", - "\u00d0\u00ba\u00d1\u0122\u00d0\u00b0\u00d1\u0139", - "cence", - "\u0120geograph", - "culo", - "\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d1\u012e", - "\u0120sweating", - "\u00ed\u0125\u0122", - "\u0120symmetry", - "ts\u00c3\u00a5", - "\u0120jan", - "\u0120Ferr", - 
"\u00e9\u00a6\u0138", - "\u0120ambassador", - "zi\u00c4\u013bk", - "\u0120musun", - "\u0120\u00d1\u0125\u00d1\u0124", - "\u0120LG", - "issent", - "commun", - "\u0120cours", - "\u0120develops", - "\u0120bronze", - "\u0120substances", - "driven", - "\u00ec\u00a3\u00bc\u00ec\u0126\u00b8\u00ec\u013c\u0136", - "\u0120aos", - "\u00e5\u0126\u0126", - "\u0120PROFESS", - "half", - "\u0120sorted", - "\u0120Bomb", - "\u00d0\u00bb\u00d0\u00b0\u00d0\u00b3", - "\u0120Malaysia", - "\u0120Christina", - "\u0120teammate", - "\u00e8\u0123\u0140", - "FT", - "\u0120k\u00c4\u00b1", - "hearted", - "++", - "ogenic", - "\u0120bells", - "\u0120Ouais", - "\u0120specialists", - "\u00d0\u00b1\u00d1\u012d", - "depth", - "lasses", - "gies", - "\u0120Coffee", - "\u0120marking", - "\u0120foll", - "uli", - "\u0120adhesive", - "\u0120Bot", - "\u0120Punkt", - "eye", - "\u0120Bub", - "elong", - "\u00e5\u012a\u00b6", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00ba", - "\u0120donor", - "84", - "\u0120enfor", - "\u0120catches", - "\u0120bricks", - "\u0120knitting", - "\u0120Knowing", - "oks", - "HY", - "ride", - "\u0120Fantasy", - "iman", - "\u0120pse", - "\u0120\u00ec\u013a\u00a8", - "\u0120\u00d0\u00b2\u00d0\u00b4", - "\u0120restra", - "\u0120evaluated", - "\u00d1\u0122\u00d0\u00b5\u00d0\u00b2", - "\u0120fortunately", - "\u0120chegar", - "\u00d8\u00b1\u00d8\u00a8", - "\u0120domains", - "ibi", - "arry", - "\u0120shutter", - "\u0120ficou", - "Mike", - "\u0120inclu", - "\u0120donors", - "\u0120apl", - "\u0120Lower", - "\u0120imported", - "\u0120academy", - "\u0120finals", - "\u0120disappears", - "\u00d9\u012c\u00d8\u00a7", - "\u0120administrator", - "js", - "\u0120cutter", - "\u0120ranging", - "\u00c3\u00b6rper", - "\u0120constraint", - "\u0120Table", - "\u0120Shan", - "vic", - "\u0120Fix", - "\u0120Swift", - "ounces", - "\u0120Warum", - "\u0120lettuce", - "appelle", - "\u0120shave", - "\u0120b\u00c3\u00a1s", - "\u012077", - "\u0120Ooo", - "ao", - "\u0120McM", - "\u0120Drew", - "\u0120lump", - "\u0120lashes", - "scheinlich", - "Rep", - "inis", - "\u0120Cette", - "\u0120composite", - "emetery", - "\u0120sorte", - "\u0120Financial", - "\u00d0\u00be\u00d0\u00bd\u00d0\u00b5", - "rones", - "\u0120Voy", - "\u0120t\u00c3\u00a9c", - "\u0142\u00b9", - "\u0120Ninja", - "\u0120Corin", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00bd\u00d1\u0131", - "\u00ec\u013f\u00b4\u00ec\u0139\u012a", - "\u0120nich", - "\u0120detective", - "\u00e2\u0122\u00a6\"", - "\u00cf\u0125\u00ce\u00b5", - "\u013f\u00bc\u00eb\u0131\u0126", - "\u0120\u00eb\u00b3\u0122", - "\u0120\u00eb\u00b8\u0136\u00eb", - "\u0120prope", - "\u0120Wright", - "\u0120\u00d7\u0136\u00d7\u00aa", - "\u0120Shi", - "\u0120\u00e3\u0123\u0141", - "\u0120investigations", - "\u00e9\u0124\u0126\u00e6\u013a\u00af", - "\u0120PowerPoint", - "\u0120Chu", - "\u0120\u00ec\u013a\u00a4\u00ed", - "\u0120\u00ec\u013b\u0126\u00ec\u0142\u0126", - "\u0120Fragen", - "unning", - "\u0120pourrait", - "\u0120textbook", - "\u00d0\u00bc\u00d1\u012d", - "\u0120fahren", - "\u0120\u00d1\u0124\u00d0\u00be\u00d1\u0122", - "\u0120lakes", - "\u00c3\u00bcnde", - "Int", - "\u0120Metro", - "\u0120mansion", - "\u0120\u00d0\u00b0\u00d0\u00b1", - "\u0120Zhou", - "\u0120corridor", - "\u0120escol", - "\u0120indicating", - "ia\u00c5\u0124a", - "\u0120mommy", - "\u0120archives", - "\u0120founders", - "engine", - "\u0120Dieu", - "\u0120sickness", - "\u0120\u00eb\u00b3\u00b4\u00eb\u012d\u012a\u00ea\u00b9\u012e", - "\u0120arb", - "\u0120ned", - "\u0120Chop", - "\u0120covid", - "\u0120slam", - "\u0120publications", - "DC", - 
"\u0120spends", - "\u00e6\u00be", - "\u0120refugee", - "\u0120dile", - "\u0120\u00d7\u0132\u00d7\u0138", - "ificar", - "\u0120Sach", - "Gu", - "\u0120reload", - "????", - "\u0120je\u00c5\u013dli", - "\u0120\u00d1\u0123\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00be", - "\u0120simplicity", - "\u0120bullying", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00bb", - "\u0120realidad", - "\u0120unclear", - "appa", - "levant", - "\u0120ISIS", - "\u0120Watson", - "\u0120dein", - "\u0120Micro", - "\u00ed\u0137\u013e\u00eb", - "\u00c3\u00bcg", - "\u0120devam", - "\u0120tweeted", - "\u00e5\u00b0\u0130", - "\u0120understandable", - "atan", - "\u0120versa", - "\u0120preca", - "\u0120v\u00e1\u00bb\u0123", - "\u0120Copy", - "\u0120Oracle", - "\u0120mindfulness", - "\u0120discret", - "ernen", - "\u0120Ple", - "Have", - "\u0120isolate", - "\u0120deu", - "\u0120seventy", - "\u0120Hills", - "\u0120arcade", - "\u0120\u00d1\u0123\u00d0\u00bf\u00d0\u00b5\u00d1\u0128\u00d0\u00b8", - "\u0120siguiente", - "\u0120B\u00c3\u013eNDNIS", - "liga", - "\u0120\u00d0\u00b2\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00b5\u00d1\u0129", - "\u00c3\u00b4m", - "\u0120tweets", - "\u0120schauen", - "\u0120critique", - "\u0120\u00f0\u0141\u0130\u00b5", - "\u0120statt", - "\u0120\u00d1\u0123\u00d0\u00b0\u00d0\u00bc\u00d0\u00be\u00d0\u00b5", - "\u00c3\u00a2ncia", - "\u0120supernatural", - "\u0120plugged", - "Fl", - "yn\u00c4\u00b1", - "\u0120Tambi\u00c3\u00a9n", - "\u0120encouragement", - "\u0120Server", - "\u00eb\u0124\u013e", - "upa", - "\u0120aston", - "\u0120hears", - "\u00d1\u0122\u00d0\u00b0\u00d1\u0127", - "\u0120sche", - "\u0120rats", - "\u0120recuper", - "\u0120unten", - "\u0120Fighting", - "\u0120academics", - "\u00e7\u00a4\u00ba", - "\u0120S\u00c3\u00bc", - "\u00d1\u0123\u00d0\u00ba\u00d0\u00b8\u00d1\u0127", - "\u0120paired", - "\u0122\u00ec\u013f\u0126", - "\u0120\u00c3\u00a1rea", - "\u0120sweetness", - "\u00e5\u0131\u012c", - "\u0120defer", - "\u0120muitas", - "\u0120Audio", - "\u0120locker", - "\u00d9\u012c\u00d8\u00af", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d0\u00b2", - "\u0120buena", - "ANS", - "\u0120detector", - "avo", - "bek", - "\u0120\u00ce\u00b1\u00ce\u00bd", - "\u00ed\u0130\u00b8", - "\u0120dragged", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00bb\u00d0\u00b6\u00d0\u00b5\u00d0\u00bd", - "\u00c3\u0138", - "\u00d8\u00b1\u00d8\u00a9", - "\u00ec\u013f\u00b4\u00ec\u00a7\u0122", - "\u0120celle", - "cking", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00ac", - "\u0120Canvas", - "\u0120espa\u00c3\u00b1", - "\u0120glimp", - "\u0120spreads", - "ongo", - "\u0120Mason", - "\u0120Ing", - "\u0120\u00ea\u00b0\u0122\u00eb\u012c\u00a5", - "\u00cf\u0126\u00ce\u00b9\u00ce\u00ba", - "\u0120secular", - "\u0120bater", - "\u0120inquiry", - "\u0120energies", - "\u0120manufactured", - "\u0120vegetarian", - "\u0120pineapple", - "\u00d1\u0131\u00d1\u0124\u00d0\u00b0", - "\u0120practitioners", - "2000", - "\u0120\u00ed\u0137\u00b4\u00ec\u013c\u0136", - "\u0120\u00ec\u0139\u00ac\u00eb\u0141\u00ac\u00eb\u00b6\u0126\u00eb\u0135\u00a4", - "\u0120\u00eb\u00b6\u012a\u00eb", - "\u0120Jefferson", - "\u0120Joan", - "\u0120tram", - "\u00e5\u00ae\u00b9", - "chmal", - "\u0120Hait", - "\u00e1\u00b9\u0129", - "\u0120unreal", - "\u0120symbolic", - "\u0120stealth", - "\u0120splash", - "\u0120Entertainment", - "\u0120metallic", - "?\".", - "\u00e8\u00b6\u012c", - "around", - "\u0120despair", - "\u0120Nevada", - "\u0120Finance", - "\u0120krie", - "\u0120Lux", - "\u0120Smash", - "keeping", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00b3", - "\u0120narciss", 
- "\u0120dzisiaj", - "\u0120tolerate", - "oard", - "\u0120linking", - "\u0120Economic", - "\u0120\u00ec\u00bc", - "\u0120morph", - "\u0120Nak", - "\u0120Baker", - "aton", - "rings", - "\u0120Peng", - "\u0120Airport", - "\u00e3\u0123\u012d\u00e3\u0123\u00a3\u00e3\u0123\u0141", - "\u00ed\u0137\u013a\u00eb\u012d\u00a4", - "\u00a7\u0123", - "prints", - "\u0120hadi", - "\u0120empir", - "\u0120Lives", - "anners", - "\u0120\u00d0\u00bd\u00d0\u00b8\u00d0\u00bc", - "\u0120PROFESSOR", - "\u0120positively", - "antom", - "\u0120badge", - "kelt", - "\u0120interfer", - "\u0120fulfilling", - "\u0120visualization", - "\u00e9\u0139\u013e\u00e4\u00bf\u0124", - "\u0120Price", - "\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd", - "\u0120scenery", - "\u0120prone", - "\u0120wizard", - "\u0120banyak", - "verb", - "sky", - "\u0120wished", - "\u0120railway", - "\u0120\u00c3\u00bczer", - "\u0120alguien", - "\u0120AW", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bb\u00d0\u00b8\u00d1\u0129\u00d0\u00b5", - "\u0120reacting", - "\u0120Buch", - "\u00e0\u00b8\u00b6", - "\u0120anth", - "\u0120sih", - "\u0120hust", - "\u0120Screen", - "ilant", - "aho", - "\u0120fragrance", - "\u0120elevation", - "\u0120Mediter", - "\u0120\u00eb\u00bf", - "\u0120\u00c3\u00a9qu", - "\u0120wraps", - "\u0120inert", - "\u0120recreate", - "\u00d0\u00bb\u00d0\u00b0\u00d1\u0124", - "\u0120boleh", - "\u0120harassment", - "unky", - "\u0120glimpse", - "regierung", - "\u0120futur", - "\u0120repository", - "\u0120engra", - "\u0120trafficking", - "assis", - "\u0120Trek", - "\u0120\u00eb\u00b2\u012e", - "\u0120\u00eb\u00a7\u012a\u00eb", - "\u0120Kab", - "aniu", - "give", - "\u0120dinosaurs", - "\u0120feather", - "\u0120attitudes", - "\u0120plum", - "\u0120RS", - "\u0120Anfang", - "illery", - "\u0120\u00ec\u012c\u00a4", - "MY", - "\u0120trzeba", - "\u0120skies", - "\u0120Aj", - "urable", - "CU", - "\u0120Shane", - "\u0120departure", - "\u0120TON", - "ieten", - "rats", - "\u00e6\u00b0\u0139", - "isu", - "\u0120bord", - "\u0120interestingly", - "\u00e7\u013b\u00bb", - "oughing", - "\u0120rushing", - "\u0120volatility", - "\u0120pyt", - "\u0120formats", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d1\u0124", - "\u0120\u00ea\u00bc\u0143", - "\u0120whatnot", - "\u0120comport", - "sw", - "orean", - "\u0120Relax", - "\u0120clan", - "\u0120AH", - "\u0120pew", - "\u0120dictionary", - "Take", - "shirts", - "\u0120Hugh", - "\u0120\u00d8\u00b9\u00d9\u0126\u00d9\u012c", - "\u0120Pic", - "\u0120enrolled", - "\u0120jednak", - "\u0120offerings", - "\u0120coraz", - "Life", - "\u0120!!!", - "\u0120cler", - "\u0120Videos", - "\u0120Rodrig", - "\u0120Ident", - "\u0120Pos", - "\u0120Stage", - "\u0120Race", - "\u0120enact", - "\u00e3\u0123\u0126\u00e3\u0123\u00be\u00e3\u0123\u0139\u00e3\u0123\u0141", - "\u0120Gy", - "\u0120Hispan", - "\u0120defence", - "\u0120Campbell", - "matic", - "\u0120relev", - "\u0120peach", - "\u0126\u00b8\u00ec\u013c\u0136", - "\u0120paradise", - "\u0120ceremon", - "\u0120annoyed", - "\u00e6\u012e\u0129", - "lax", - "\u0120exploit", - "\u0120clause", - "eker", - "\u0120Bloom", - "nant", - "ateurs", - "\u0120heights", - "Even", - "\u00d1\u0123\u00d0\u00be\u00d0\u00bd", - "\u0120outrage", - "\u0120Vietnamese", - "\u00e3\u0123\u00af\u00e3\u0123\u00af", - "TR", - "\u0120eer", - "\u0120cannon", - "\u0120Comb", - "\u0132\u00eb\u00a7\u012e", - "\u00e8\u00bb\u012c", - "\u0120\u00ea\u00b2\u0125\u00eb\u0131\u0126", - "\u0120accomplishments", - "\u0120Analytics", - "\u0120shaping", - "reiben", - "\u0120bachelor", - "\u0120fingert", - "acked", - "\u0120pyramid", - "\u0120Stewart", - 
"\u00c3\u00a1st", - "\u0120survivor", - "\u0120duct", - "\u0120dealers", - "\u00e6\u00b4\u00bb", - "\u00d8\u00b9\u00d9\u0127", - "\u00d0\u00bb\u00d0\u00b8\u00d0\u00bd", - "\u0120ede", - "\u00d7\u0137\u00d7\u00a2", - "\u0120\u00d9\u0125\u00d8\u00a7\u00d9\u0128", - "\u0120\u00cf\u0126\u00ce\u00b9", - "\u0120chooses", - "\u0120Own", - "\u00d0\u00b3\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d0\u00b2", - "hire", - "\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d1\u012d\u00d0\u00b5", - "\u0120\u00d0\u013d\u00d1\u0130", - "\u0120\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d0\u00b2", - "tech", - "\u0120droit", - "\u0120subjective", - "enes", - "\u0120divis", - "avez", - "\u0120maneuver", - "\u00e0\u00b9\u0126\u00e0\u00b8\u0136", - "adece", - "\u0120Ens", - "acial", - "\u0120Protection", - "\u0138\u00b4", - "\u0120formally", - "\u0120wyd", - "ingu\u00c3\u00a9m", - "\u0120ziem", - "\u0120recruiting", - "\u00d7\u013b\u00d7\u013c", - "nem", - "\u0120forbidden", - "\u0120Bapt", - "\u00d7\u0132\u00d7\u0142\u00d7\u013b", - "\u0120subset", - "\u0120Magaz", - "nement", - "\u0120aquela", - "ragon", - "\u0120committees", - "\u0120\u00c3\u00a9taient", - "udi", - "\u0120Dawn", - "\u0120bore", - "\u0120composer", - "\u0120wi\u00c4\u013bcej", - "anga", - "\u0120dislike", - "\u0120Days", - "\u00e5\u0141\u00ba", - "\u0120paral", - "\u0120mientras", - "\u0120heavens", - "\u00e3\u0123\u0134", - "heid", - "\u0120traders", - "once", - "\u0120mascara", - "\u0120\u00cf\u0122\u00cf\u0123\u00ce\u00bf", - "\u0120whisper", - "\u0120Musk", - "\u00e9\u013d\u0128", - "\u0120Familie", - "Allah", - "\u0120Olivia", - "\u0120Pros", - "\u0120olika", - "ilim", - "\u0120r\u00c3\u00a9pond", - "\u0120Peters", - "\u0120\u00e5\u00be\u012a", - "\u0120bites", - "\u0120vic", - "\u0120NY", - "emption", - "\u0120450", - "\u0120visuals", - "\u0120lieu", - "\u00c3\u00bccken", - "\u0120Steel", - "\u0120GP", - "wait", - "\u0120noticeable", - "ucha", - "\u0120rehabil", - "\u0120rejection", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d0\u00b5\u00d0\u00b4\u00d1\u0125\u00d1\u0130\u00d1\u012b", - "\u0120slider", - "\u0120regarded", - "\u0120gravit", - "\u0120Reserve", - "count", - "\u0120breeding", - "\u0120longe", - "aleb", - "\u0120knight", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00b9", - "\u0120pr\u00c3\u00a9sent", - "\u0124\u013a\u00ec\u013c\u0136", - "\u0120Specifically", - "\u0120poses", - "\u0120veure", - "okay", - "emas", - "\u0120\u00e3\u0123\u00a7\u00e3\u0123\u013b", - "\u0120maj\u00c4\u0127", - "\u0120webinars", - "\u0120cannabis", - "\u0120damals", - "\u0120Northwest", - "\u0120pada", - "\u0120crowds", - "\u0120futures", - "\u0120\u00c3\u00a4n", - "\u0120civilians", - "\u0120Sachen", - "\u00e6\u012f", - "\u0120traces", - "\u0120\u00eb\u00a8\u00b9\u00ea\u00b3\u0142", - "QU", - "\u00e9\u00a1\u013a\u00e3\u0123\u0126", - "\u0120IF", - "an\u00c4\u00b1n", - "\u00ec\u0124\u00b4", - "\u0120biblical", - "\u0120Ved", - "\u0120storing", - "\u00d1\u0122\u00d0\u00b0\u00d0\u00b2\u00d0\u00bb\u00d1\u0131", - "\u00e6\u0129\u012b\u00e8\u00a9\u00b2", - "\u0120nast", - "\u0120d\u00c3\u00b6", - "\u00d1\u0122\u00d0\u00be\u00d0\u00bf", - "elia", - "\u0120sideways", - "\u0120Understand", - "\u0120Qur", - "\u0120perpend", - "\u0120Millionen", - "\u0120watermelon", - "\u0120Divine", - "ultur", - "abord", - "\u0120successes", - "\u0120hombre", - "\u0120carp", - "\u0120suscept", - "ungkin", - "\u0120kij", - "ulus", - "\u00d8\u00a7\u00d8\u00ac", - "\u0120notch", - "\u0120polynomial", - "\u00e5\u00b9\u00b2", - "\u00e5\u00a9", - "\u0120\u00c3\u00banico", - 
"\u0120telescope", - "\u0120politique", - "kiem", - "\u0120\u00ce\u0143\u00ce\u00bd\u00ce\u00b1", - "\u0120aggregate", - "\u0120Geoff", - "\u0120tril", - "\u0120GRA", - "\u0120subscriber", - "imet", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00bb\u00d0\u00bb\u00d0\u00b0\u00d1\u0122", - "oping", - "\u0120therapeut", - "\u0120Cancer", - "\u0120parade", - "\u0120irrig", - "\u00e2\u013b\u00aa\u00e2\u013b\u00aa", - "\u0120clearer", - "\u0120bog", - "\u0120Maur", - "\u00e0\u00b8\u00b2\u00e0\u00b8\u0129", - "\u0120Shanghai", - "achte", - "\u0120Kol", - "elujah", - "\u0120hav", - "\u0120Crime", - "sek", - "\u0120\u00eb\u00a1\u013e", - "ienna", - "\u0120Gor", - "\u00e8\u013d", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0124\u00d1\u0122", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00b6\u00d0\u00b5\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "\u0120Lift", - "\u0120Sort", - "\u0120Psal", - "\u0120ping", - "\u0135\u013f", - "phis", - "\u0120FUCK", - "\u0120Syn", - "\u0120bamboo", - "\u00ac\u00ec\u013a\u0123", - "cuts", - "\u0120mmm", - "\u0120funktioniert", - "\u0120_", - "\u00c3\u0143cio", - "Stop", - "\u0120imaginary", - "\u0120notamment", - "\u0120Initiative", - "\u00e3\u0125\u00a5", - "\u0120Kurt", - "\u0120loosen", - "\u0120buscar", - "\u00e7\u0123\u00ab", - "\u0120zelf", - "\u0120props", - "\u00e5\u013d\u012b", - "\u0120moeten", - "\u0120milli", - "\u0120halls", - "\u0120Match", - "\u0120brackets", - "\u0120Cou", - "\u00e6\u00a6\u0124", - "\u0120\u00d0\u013e\u00d0\u00b0\u00d1\u0122", - "ISA", - "\u0120cigarette", - "\u0120competitions", - "\u0120MIN", - "\u0120beh\u00c3\u00b6", - "voor", - "\u0120ust", - "\u0120Zi", - "\u0120Occ", - "ulates", - "\u0120balloons", - "\u0120pronto", - "\u0120Miy", - "\u0120File", - "\u0120\u00d0\u00ba\u00d0\u00bb\u00d0\u00b0\u00d1\u0123\u00d1\u0123", - "\u00d0\u00bd\u00d1\u0125\u00d0\u00bb", - "\u0120cereal", - "\u0120increment", - "\u0120refined", - "\u00e5\u0131\u00a6\u00e5\u00a4\u0138", - "prising", - "\u0120RF", - "\u0120respectful", - "\u0120loot", - "asket", - "\u0120deixa", - "ingle", - "\u0120funciona", - "\u0120Revel", - "\u0120sober", - "\u0120performs", - "\u0120Gentle", - "\u00e3\u0124\u00a8", - "\u0120recipient", - "\u0120Hause", - "\u0120\u00eb\u0125", - "From", - "\u0120ministers", - "\u0120paradox", - "\u00e5\u00b0\u00b1\u00e6\u013a\u00af\u00e8\u00aa\u00aa", - "\u0120tasting", - "\u0120\u00d7\u0136\u00d7\u0139", - "\u0120reuse", - "\u0120Lane", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b2\u00d0\u00b5\u00d1\u0122\u00d1\u012a", - "\u0120remembers", - "\u0120feminist", - "\u0120commitments", - "\u0120projected", - "\u0120gaz", - "iyoruz", - "\u0120obligations", - "Ro", - "zar", - "\u0120chw", - "\u0120JAM", - "\u0120b\u00c4\u013bd\u00c4\u0127", - "aspberry", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d0\u00be", - "\u00eb\u00b2\u0137", - "\u0120regulated", - "\u0120wicht", - "\u0120Trevor", - "\u0120secondly", - "\u0120Ihre", - "elsh", - "\u0120reporters", - "\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d0\u00b0", - "oyo", - "GI", - "\u0120interconnect", - "\u00e9\u0132\u013a", - "OSH", - "\u00e6\u0143\u00b2", - "\u0120brass", - "\u0120ignoring", - "\u00e4\u00bb\u012c\u00e6\u0139\u00a5", - "infect", - "\u0120projekt", - "oret", - "\u00cf\u0126\u00ce\u00b1\u00ce\u00bd", - "\u0120\u00d1\u0124\u00d0\u00b8\u00d0\u00bf", - "\u0120mutta", - "\u0120unboxing", - "\u0126\u00b0", - "\u00e5\u00a1\u012c", - "\u0120advised", - "\u0120Denver", - "\u0120severely", - "\u0120Mhm", - "\u0120flipped", - "\u0120pien", - "\u0120kommun", - "\u0120FRE", - 
"\u0120\u00e0\u00ae\u0129\u00e0\u00ae\u00b0", - "ainted", - "\u0120knives", - "\u0120habl", - "\u0120geworden", - "arettes", - "CS", - "\u0120\u00d0\u00bc\u00d0\u00b0\u00d0\u00bb\u00d0\u00b5\u00d0\u00bd\u00d1\u012e", - "\u0120galax", - "\u0120ninete", - "\u00ea\u00b1\u00b0\u00eb\u0124\u013a", - "\u0120sis", - "\u0120advisory", - "\u0120drilling", - "\u0120Wouldn", - "\u00c3\u00bcnf", - "gestellt", - "\u0120Helen", - "\u0120\u00d7\u0140\u00d7\u0132", - "apolis", - "\u0120rzeczy", - "\u0120terra", - "\u0120hep", - "\u0120alg\u00c3\u00ban", - "ikk", - "\u0120astronom", - "\u0120Starbucks", - "k\u00c4\u0127", - "\u0120patrol", - "\u0120\u00ec\u00bd\u0136", - "\u0120gon", - "\u0120\u00e3\u0122\u0132", - "\u0120sonst", - "\u0120encounters", - "\u0120retrou", - "\u0120sharks", - "\u0120dor", - "\u0120Rever", - "\u0120evapor", - "\u0120reservoir", - "\u0120alleged", - "uler", - "\u0120verm", - "\u0120commerce", - "\u0120fitted", - "gem", - "\u0120tactical", - "\u0120lith", - "\u00e9\u012b\u0126\u00e5\u00a1\u0136", - "had", - "\u00e8\u00ae\u012c", - "\u0120carbohyd", - "\u0120lengths", - "\u00ce\u00b9\u00ce\u00bf", - "\u0120demographic", - "Rob", - "\u0120Skin", - "ccoli", - "\u0120simplified", - "\u0120readily", - "\u0120Cum", - "adesh", - "\u0120D\u00c3\u00a5", - "usst", - "igne", - "eton", - "\u0120menor", - "qi", - "OOM", - "\u00e0\u00b8\u0143\u00e0\u00b8\u013b", - "\u0120psychiat", - "\u0120eighty", - "\u0120\u00d0\u00bc\u00d0\u00b8\u00d0\u00bb\u00d0\u00bb\u00d0\u00b8", - "\u0120Tob", - "edo", - "\u00e7\u00b6\u00b2", - "\u0120\u00c4\u0133\u00e1\u00ba\u00bfn", - "\u0120circuits", - "\u0120LAUGH", - "icism", - "emor", - "\u0120regener", - "egree", - "\u0120bureauc", - "\u0120Alber", - "\u00e4\u00b9\u012d\u00e5\u00be\u012e", - "\u0120Wor", - "\u00e5\u00a4\u00ab", - "\u0120resin", - "\u0120by\u00c5\u0124y", - "\u0120IG", - "\u00e0\u00af\u012f,", - "\u012078", - "\u0120weeds", - "\u0120Myth", - "93", - "\u00e6\u00bf", - "\u0120\u00eb\u0124\u013a\u00ec\u013b\u0136", - "\u00c3\u00a9v", - "\u00e1\u00bd", - "\u00c3\u00b6ren", - "\u00c3\u00a7ar", - "\u0120PAUL", - "\u0120disadvant", - "\u0120positioning", - "\u0120cocktail", - "\u0120agrees", - "nn", - "\u0120Sally", - "Ms", - "\u0120inherent", - "\u0120monetary", - "\u0120natur", - "\u0120Nh", - "\u0120Import", - "\u0120leben", - "\u0120wi", - "ussy", - "\u0120obes", - "\u0120wandering", - "\u0120\u00ec\u012d\u0142\u00eb", - "\u00c4\u0127da", - "etchup", - "\u0120disposal", - "\u0120JA", - "\u0120Cer", - "zilla", - "\u0120virgin", - "\u0120Slide", - "andel", - "\u0120righteousness", - "\u0120\u00ce\u00a3", - "\u0120ideia", - "\u00e4\u00bd\u0142\u00e5\u00a5\u00bd", - "\u00d0\u00b8\u00d1\u0122\u00d0\u00be\u00d0\u00b2\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "\u00d7\u00a8\u00d7\u0132", - "Comment", - "\u0120prelim", - "\u0120Vale", - "\u0120\u00ec\u00a7\u0122\u00eb\u0124\u013e", - "\u0120Vanc", - "OMAN", - "\u0120\u00d0\u00bf\u00d1\u0138\u00d0\u00b4", - "\u0120yum", - "stre", - "cem", - "\u0120pocz", - "\u0120fragment", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d1\u0125\u00d1\u0129\u00d0\u00b0\u00d0\u00b5", - "\u0120undergo", - "\u0120Hank", - "ceks", - "\u0120FPS", - "\u0120ocur", - "\u0120deterior", - "\u00e6\u00b3\u00a8", - "\u0120empresas", - "Paul", - "\u0120)))", - "\u0120\u00d0\u00b2\u00d1\u0122\u00d0\u00b5\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8", - "\u0120scold", - "\u00d7\u013b\u00d7\u00a2", - "\u0120suspected", - "\u0120accessing", - "\u0120substit", - "\u0120historians", - "\u00e4\u00bb\u00bb", - 
"\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00bb\u00d0\u00be", - "\u0120socied", - "rone", - "\u0120reden", - "\u0120extends", - "epherd", - "\u0120balcon", - "\u00e4\u00b8\u012f\u00e8\u00b5\u00b7", - "\u0120Solo", - "\u0120politician", - "\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be", - "\u0120irgendw", - "\u0120traumatic", - "\u0120rapper", - "\u0120ROBERT", - "Really", - "\u00e6\u0123\u00af", - "\u0120lineup", - "ASE", - "\u0120contractor", - "\u0120Corporation", - "gor", - "\u0120Todo", - "\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00be\u00d0\u00b9", - "FBE", - "\u0120newsletter", - "\u0120ko\u00c5\u0126", - "alties", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d1\u0129", - "\u0120Heavy", - "\u0120swords", - "\u0120manipulation", - "\u0120funk", - "\u0120v\u00c3\u00a5r", - "\u0120Taliban", - "\u0120\u00eb\u00b0\u00a5", - "\u0120acne", - "\u00c3\u00bcr\u00c3\u00bc", - "\u0120deswegen", - "\u0120Dust", - "\u0120silic", - "\u0120hooks", - "\u0120blij", - "\u0120petits", - "\u0120filme", - "\u0120Bereich", - "\u0120Said", - "\u0120imposed", - "\u0120diary", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d1\u0122", - "\u0120Gates", - "\u0120alta", - "\u00e5\u00b8\u012e", - "\u0120chcia", - "pleasant", - "\u0120\u00eb\u00b0\u013f", - "\u0120mo\u00c5\u00bcemy", - "\u0120Austria", - "\u0120broker", - "\u0120sucked", - "\u00e8\u0122\u0125", - "\u0120compartment", - "\u0120clone", - "\u0120\u00d7\u0136\u00d7\u00a2", - "\u0120Danke", - "\u0120nochmal", - "\u00d0\u00b5\u00d0\u00b7\u00d0\u00b4", - "\u0120adrenal", - "\u0120kleinen", - "\u00e3\u0123\u00be\u00e3\u0123\u0139\u00e3\u0124\u0129\u00e3\u0123\u0128", - "\u0120subsequently", - "\u0120decentral", - "\u0120genetics", - "\u0120\u00ea\u00b4\u0133", - "\u0120monitors", - "\u0120Applic", - "\u0120Reporter", - "wert", - "\u0120wiem", - "\u0120Movement", - "\u0120interviewing", - "\u0120hairs", - "\u0120pu\u00c3\u00b2", - "\u0120Chelsea", - "\u0120coher", - "\u0120cot", - "\u0120zas", - "\u0120patches", - "\u0120lah", - "\u00d1\u0125\u00d0\u00bd\u00d0\u00ba", - "\u0120Reagan", - "\u0120Marco", - "city", - "\u0120defender", - "\u0120decoration", - "iji", - "\u0120litter", - "\u00d0\u00a8", - "\u0120jego", - "REW", - "\u0120Pik", - "\u0120Hee", - "\u0120Iv", - "\u0120\u00d0\u00b8\u00d0\u00b4\u00d0\u00b5", - "\u0120Theater", - "\u0120\u00d1\u0129\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d0\u00be", - "\u0120sweater", - "\u0120highlighting", - "\u0120ainsi", - "\u0120diplomatic", - "\u0120Nevertheless", - "\u00e5\u00b3", - "ASON", - "\u0120p\u00c3\u00bablico", - "\u0120ferm", - "reated", - "cod", - "\u0120\u00eb\u00ac\u00bc\u00eb", - "\u0120mister", - "\u0120Vancouver", - "\u0120recognizes", - "ecd", - "\u0120complications", - "encial", - "\u00e3\u0123\u0139\u00e3\u0123\u0131", - "\u0120\u00ea\u00b0\u0122\u00ec\u00a7\u0122", - "\u0120Ultimate", - "\u0120vaig", - "\u0120Merry", - "\u00d7\u0137\u00d7\u0134", - "\u0120Marcus", - "\u00e7\u00b8\u00bd", - "owego", - "\u0120mente", - "Sm", - "\u0120aja", - "\u0120Tao", - "\u0120judicial", - "\u0120entrepreneurship", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00bc\u00d0\u00bd\u00d0\u00be\u00d0\u00b3\u00d0\u00be", - "\u0120pis", - "\u0120erg", - "\u0120christ", - "\u0120Curt", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d0\u00bf", - "\u00ce\u00bb\u00ce\u00b5", - "ensch", - "\u00c3\u0143re", - "\u0120focal", - "\u0120Diamond", - "av\u00c3\u0143a", - "\u0120hanno", - "\u0120Squad", - "\u0120associations", - "\u0120Creative", - "\u0120messenger", - "\u0120begging", - "\u0120decimal", - 
"\u0120d\u00c4\u00b1\u00c5\u0141", - "\u0120metadata", - "sels", - "\u0120\u00c4\u00b0\u00c5\u0141", - "\u00e1\u00bb\u00afa", - "\u0120difficile", - "d\u00c4\u00b1", - "\u0120slaughter", - "\u0120Verg", - "\u0120\u00d7\u0134\u00d7\u013f", - "\u00e7\u00b0\u00a1", - "\u00e6\u012e\u012b", - "\u0120Tea", - "asses", - "Ok", - "\u0120synthes", - "otiation", - "\u0120painter", - "\u0120elbows", - "\u0120architectural", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b4", - "\u0120glor", - "image", - "ampa", - "culiar", - "\u0142\u00a8", - "\u0120teve", - "\u0120Stelle", - "\u0120Bam", - "\u0120\u00ec\u00b4\u012a", - "asis", - "ipedia", - "\u0120GI", - "\u0120Active", - "\u00e7\u0126\u00b6\u00e5\u0132\u0130", - "azi", - "\u00e3\u0124\u012e\u00e3\u0123\u00a6", - "\u0120Lucky", - "\u00ed\u0137\u00a9", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d1\u0127\u00d0\u00be\u00d0\u00b4", - "\u0120runway", - "\u0120authentication", - "\u0120posible", - "\u0120supplements", - "\u0120surgical", - "Gen", - "\u0120feasible", - "DO", - "\u0120outlook", - "\u0120intervals", - "\u0120anecd", - "\u00c3\u0142ng", - "\u0120straps", - "\u0120Shu", - "udd", - "issenschaft", - "\u0120porte", - "\u0120committing", - "\u0120alley", - "\u0120covenant", - "\u0120Pedro", - "lessness", - "\u0120Solid", - "\u0120Molly", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00ba\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d1\u0122", - "\u0120cooperate", - "\u00e5\u012e\u0139", - "ollen", - "\u0120tuna", - "\u0120kindergarten", - "\u0120Siz", - "\u0120du\u00c5\u00bco", - "\u0120MBA", - "\u0120GEORGE", - "\u0120Fisher", - "\u00e5\u00bf\u013a", - "\u0120Caesar", - "\u0120\u00d0\u00ba\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d0\u00b8\u00d0\u00b2", - "\u0120Delhi", - "zym", - "\u0120explicar", - "\u00ea\u00b0\u0122\u00ec\u00a7\u0122", - "uns", - "grow", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d1\u0123", - "\u012086", - "\u0120stating", - "\u0120massa", - "chter", - "\u0120\u00ec\u00bb\u00ac\u00eb\u0141\u00ac", - "\u0120deputy", - "SM", - "noc", - "\u0120geography", - "\u0120Enterprise", - "\u0120Cant", - "\u00c3\u00b6z", - "\u0120unpack", - "\u0120\u00ed\u013b\u0136\u00eb", - "\u0120searches", - "\u0120presidency", - "\u0120trivial", - "\u0120pige", - "oubt", - "\u00e3\u0124\u013c", - "\u00ec\u00bc\u0122\u00ec\u013f\u00b4", - "\u0120budgets", - "\u0120ub", - "\u0120pne", - "\u0120Yale", - "\u0120\u00c5\u0141\u00c3\u00b6yle", - "regular", - "\u0120imperfect", - "ARA", - "\u0120fam\u00c3\u0143lia", - "urm", - "\u0120Adventure", - "\u00e3\u0125\u012c", - "cis", - "emark", - "\u0120nego", - "\u0120inappropriate", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00b7", - "\u0120\u00d1\u0122\u00d0\u00be\u00d0\u00bb", - "\u0120dreamed", - "Bry", - "\u0120shuttle", - "\u0120pillars", - "\u0120bik", - "inum", - "\u0120\u00d1\u0125\u00d1\u0123", - "\u0120Nebr", - "\u0120perpendicular", - "\u0120booked", - "bery", - "\u0120vikt", - "bear", - "esus", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00b7\u00d0\u00bc\u00d0\u00be\u00d0\u00b6\u00d0\u00bd\u00d0\u00be", - "\u00a8\u00b9", - "\u0120presumably", - "\u0120Memphis", - "\u0120ambulance", - "\u00d7\u0137\u00d7\u0140\u00d7\u00a8", - "\u0120thumbnail", - "\u0120modification", - "\u00e9\u0129\u0131", - "\u0120interpreted", - "\u0120promo", - "\u0120\u00ce\u00ba\u00ce\u00ac", - "\u0120\u00ce\u00b5\u00cf\u0122", - "\u0120acoustic", - "\u0120DB", - "\u00e5\u0135\u0130", - "\u0120nonetheless", - "oule", - "\u0120pequ", - "\u0120knob", - "\u00e3\u0124\u00a3", - "\u0120\u00eb\u0131\u012e\u00ec\u0137\u0126", - 
"\u0120purchases", - "\u0120\u00c3\u0129\u00c3\u00bcnk\u00c3\u00bc", - "\u0120dividing", - "perform", - "raction", - "healthy", - "\u0120Title", - "\u0120uk", - "\u0120cerca", - "\u0120arguably", - "\u0120fale", - "\u00eb\u00b3\u00b5", - "\u0120gamers", - "\u0120utilizing", - "\u0120offended", - "\u0120tava", - "al\u00c4\u00b1", - "\u0120median", - "\u0120infectious", - "\u0120Annie", - "\u0120smartphones", - "\u0120parole", - "\u00e5\u0138\u013f", - "\u0120Epic", - "zza", - "\u0120unified", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0137\u012e", - "\u0120curtain", - "\u0120\u00c4\u0125", - "\u0120sexually", - "\u0120unserem", - "\u0120Convention", - "\u0120allegedly", - "Ya", - "\u0120Hoo", - "enment", - "\u00e6\u0122\u00aa", - "\u00ed\u013d\u0126", - "\u0120gigantic", - "\u0120noting", - "\u0120rebo", - "\u0120Jama", - "\u0120Alz", - "\u0120borrowed", - "\u00ec\u00b9\u00a8", - "\u0120peripher", - "\u00d0\u00be\u00d1\u0124\u00d0\u00b0", - "\u0120GB", - "\u0120Gear", - "\u0120economically", - "\u0120telefon", - "\u0120queremos", - "\u0120\u00d0\u00b4\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d1\u012a\u00d0\u00b5", - "\u0120ras", - "\u0120Teach", - "icios", - "atos", - "\u0120pledge", - "bau", - "\u0120Himself", - "Link", - "\u0120espero", - "\u0120chromos", - "\u0120PER", - "\u0120erle", - "\u0120podium", - "\u00c3\u00a7os", - "\u0120nieu", - "\u0120fen", - "\u0120GOD", - "\u0120Chocolate", - "werk", - "\u0120t\u00e1\u00bb\u00ab", - "\u0120suppress", - "\u00ce\u00bb\u00ce\u00b7", - "\u0120240", - "\u0120sit\u00c3\u00a4", - "\u0120honesty", - "\u0120Bio", - "\u0120Bard", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u012b\u00d0\u00b5\u00d0\u00bc", - "\u0120\u00d0\u00bc\u00d1\u0125\u00d0\u00b7", - "\u0120marble", - "\u0120\u00d1\u0128\u00d0\u00b5\u00d0\u00bd\u00d1\u0124", - "\u0120procure", - "\u0120rotor", - "bern", - "\u0120tuh", - "\u0120headset", - "atem", - "\u0120warranty", - "\u00e0\u00ae\u00b4", - "\u0120filing", - "\u00ce\u00b9\u00ce\u00ac", - "\u0120comprendre", - "\u0120impulse", - "\u0120salv", - "written", - "\u0120institute", - "Kim", - "\u0120LGBTQ", - "ficiente", - "His", - "\u0120\u00ce\u00b1\u00cf\u0127\u00cf\u0126\u00cf\u012e", - "\u0120teenage", - "orus", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b7\u00d0\u00b1", - "See", - "\u0120Conserv", - "\u00e1\u00bb\u0123n", - "fulness", - "\u0120strawberries", - "\u0120Abu", - "\u00d0\u00b8\u00d0\u00be\u00d0\u00bd", - "\u0120olla", - "NOISE", - "\u0120Employ", - "\u0120wiped", - "urger", - "\u0120modifications", - "\u0120\u00ed\u0137\u013a\u00ec\u00a7\u0122", - "\u0120footsteps", - "\u0120honors", - "\u0120adul", - "\u0120flipping", - "\u0120HU", - "ZY", - "\u0120integrating", - "\u00d8\u00a8\u00d8\u00b1", - "ulla", - "\u0120natuurlijk", - "\u0120\u00ed\u0139\u012a", - "\u0120Ethereum", - "\u00d9\u012c\u00d9\u0126", - "wed", - "\u0120peaks", - "\u0120Kes", - "\u0120bloom", - "\u0120crashing", - "\u0120911", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00bb\u00d0\u00b8\u00d1\u0129", - "\u0120controllers", - "\u0120Dod", - "\u0120\u00d0\u00b2\u00d0\u00bc\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d0\u00b5", - "\u0120sortir", - "\u00e5\u00a5\u0129", - "\u0120Straight", - "\u0120Gracias", - "\u0120groove", - "\u0120togg", - "\u0120\u00ec\u012d\u00b6\u00ec\u013f\u0122", - "\u00c3\u00a9ro", - "\u0120outward", - "\u0120WA", - "\u0120Rocky", - "\u0120scam", - "\u0120hayat", - "ignty", - "\u00e2\u0126", - "plings", - "\u0120antibiotics", - "\u0120\u00e4\u00b8\u0122", - "\u0120nevertheless", - "jang", - "commerce", - "\u0120spoiler", - "\u0120glove", - 
"\u0120chatter", - "\u0120BY", - "~?", - "\u0120\u00ed\u013a\u00b8", - "\u0120demol", - "wechsel", - "imir", - "\u0120raid", - "\u00d0\u00b5\u00d1\u0122\u00d1\u0127", - "\u00ec\u0140\u0132\u00ea\u00b8\u00b0", - "enf", - "\u0120commented", - "\u0120optimized", - "\u0120convicted", - "\u0120bats", - "\u0120SB", - "\u0120Aur", - "\u0120Tong", - "\u0120implicit", - "\u0120Janet", - "\u0120reag", - "\u00e3\u0123\u00b2", - "\u0120Advanced", - "\u0120impose", - "\u00d7\u00a9\u00d7\u0136", - "\u0120schemes", - "ougher", - "abolic", - "\u0120\u00ea\u00b1\u00b0\u00ec\u00a3\u0142", - "\u0120slowing", - "\u0120wtedy", - "\u0120destructive", - "\u0120\u00d0\u00be\u00d0\u00bf\u00d1\u0122\u00d0\u00b5\u00d0\u00b4", - "\u0120landmark", - "\u0120\u00eb\u0131\u012a", - "\u0120Walking", - "\u00e1\u00ba\u00b9", - "\u0120tijd", - "\u0120KN", - "\u0120Quant", - "\u00ec\u013a\u00a4\u00eb", - "\u0120\u00d0\u00ba\u00d1\u0122\u00d1\u0125", - "\u0120perder", - "\u0120nove", - "\u00c3\u00a4nde", - "\u0120\u00e3\u0123\u0139", - "bia", - "\u0120custody", - "\u0120biod", - "\u00e6\u013f\u00b1\u00e8\u00a5\u00bf", - "\u0120directing", - "...\u00e2\u0122\u012d", - "\u0120reloc", - "\u0120demande", - "\u00e3\u0124\u0135\u00e3\u0123\u0142", - "\u0120o\u00c4\u0141lum", - "\u0120\u00d0\u00be\u00d0\u00b4\u00d0\u00bd\u00d0\u00b0", - "\u0120Milk", - "\u00e5\u0131\u00b7", - "\u0120Kra", - "\u0120Honda", - "\u0120pue", - "\u0120elekt", - "\u0120beginners", - "\u0120spear", - "\u00c3\u0143nh", - "\u0120Luft", - "\u0120nig", - "\u0120Schools", - "\u0120forums", - "\u0120Qin", - "ppo", - "\u0120zag", - "\u0120\u00d0\u00ae", - "\u0120toothp", - "\u0120Style", - "\u00ec\u00b4\u012a", - "\u0120punct", - "\u0120reps", - "\u0120Aly", - "\u0120amendments", - "\u0120\u00c3\u00b6z", - "\u0120digits", - "urai", - "\u0120chaotic", - "\u0120Masters", - "eon", - "\u0120Cash", - "\u0120Cuz", - "\u0120bedeutet", - "\u0120scanning", - "\u0120\u00d0\u00b6\u00d0\u00b4", - "\u00d0\u00bd\u00d0\u00b5\u00d1\u0124", - "\u0120certainty", - "jek", - "\u0120dijo", - "\u0120Climate", - "\u0120rinse", - "\u0120krij", - "veland", - "\u0120soundtrack", - "\u0120Safe", - "\u0120Nova", - "94", - "\u0120athe", - "\u0120Verb", - "oler", - "\u00ec\u013f\u00b4\u00ec\u00a3\u0142", - "\u0120vin", - "\u0120respiratory", - "\u0120Study", - "\u0120CAM", - "\u0120avocado", - "\u0120Zhen", - "\u0120latency", - "\u0120feathers", - "\u0120contar", - "\u0120\u00d0\u00b2\u00d0\u00b5\u00d1\u012b", - "\u0120fark", - "\u0120blended", - "\u0120exploded", - "\u0120XX", - "\u0120Benim", - "\u0120algu\u00c3\u00a9m", - "istoire", - "\u0120confidential", - "\u0120mast", - "\u0120\u00ec\u00bf", - "geh", - "\u0120disrespect", - "\u0120Systems", - "\u00c6\u00b0a", - "Ed", - "\u0120wys", - "\u0120exotic", - "\u0120glowing", - "\u00c3\u00b9ng", - "ounge", - "\u00e8\u0126", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b7", - "\u0120palav", - "\u0120Sword", - "\u0120gim", - "\u0120Crow", - "\u0120potent", - "bish", - "\u0120abused", - "\u0120Jed", - "\u0120gambling", - "\u0120Spect", - "\u0120investigators", - "\u00e6\u013b\u013c", - "\u0120ratt", - "\u0120dob", - "\u0120DES", - "hog", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00ba\u00d1\u0122\u00d1\u012d", - "\u00ed\u012e\u0127", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00bd\u00d1\u012e\u00d0\u00b3\u00d0\u00b8", - "\u0120\u00ed\u013a\u00b9", - "\u0120\u00eb\u00a8\u00b8\u00eb\u00a6\u00ac", - "\u0120saturation", - "\u0120inherited", - "\u0120Innovation", - "\u00ec\u0139\u012a\u00eb\u012f\u013a", - "\u0120tangible", - "\u0120depri", - "hed", - 
"\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bc\u00d0\u00be\u00d0\u00b3", - "\u0120sliced", - "\u00e0\u00a5\u012f", - "\u0120th\u00e1\u00ba\u00bf", - "\u00c5\u00a5", - "68", - "\u0120corona", - "\u0120gifted", - "\u0120soir", - "\u0120humility", - "\u0120\u00ec\u013f\u00b4\u00ea\u00b1\u00b8", - "\u0120flaws", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b0\u00d0\u00ba\u00d1\u0124\u00d0\u00b8", - "\u0120kald", - "wa\u00c5\u00bc", - "yw", - "\u00e3\u0124\u0135\u00e3\u0123\u00a7\u00e3\u0123\u013b", - "irteen", - "\u0120crochets", - "\u00a6\u00ac\u00ea\u00b0\u0122", - "\u0120\u00ec\u0142\u0126\u00ec\u0139\u0132", - "\u0120dese", - "\u00e6\u00a5\u0143", - "\u0120\u00d0\u00bc\u00d0\u00b0\u00d0\u00b3", - "\u0120dzia\u00c5\u0124", - "\u0120l\u00c3\u00a9g", - "changing", - "\u0120llev", - "\u00c5\u0126sk", - "\u00e7\u0136\u00bb", - "\u01201984", - "orns", - "\u0120Welsh", - "\u0120pharmaceutical", - "\u0120pumping", - "\u0120Shaw", - "punk", - "\u0120vault", - "\u0120kinetic", - "\u0120hurricane", - "\u0120Including", - "\u00e1\u00bb\u00a9c", - "\u0120Grandpa", - "anship", - "\u00e9\u00a6\u013b\u00e6\u00b8\u00af", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d1\u0127\u00d0\u00be\u00d0\u00b4", - "\u00d0\u00bd\u00d0\u00be\u00d0\u00b6", - "\u013e\u0142", - "utta", - "\u0120\u00ea\u00b2\u0123\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120baz", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u012a", - "\u0120peculiar", - "zy\u00c4\u0129", - "\u0120Ellie", - "\u0120learns", - "\u0120Krishna", - "\u0120consecut", - "\u0120empath", - "\u0120Din", - "\u0120traded", - "\u0120Boris", - "uggage", - "olla", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b7\u00d0\u00b2", - "\u0120eternity", - "\u0120\u00d0\u00b2\u00d0\u00bf", - "\u00c3\u00a8mes", - "\u0120grapp", - "b\u00c3\u00a9", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b5\u00d0\u00b4\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d0\u00b2", - "\u0120FC", - "\u012f\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "even", - "\u0120Nebraska", - "ortune", - "\u0120karena", - "\u0120Agent", - "\u0120sting", - "\u0120PI", - "\u0120municipal", - "powered", - "\u0120consegue", - "\u0120Manchester", - "\u0120rainy", - "\u0120bli", - "\u0120kost", - "\u0120halten", - "\u0120Ahhh", - "insula", - "erting", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d9\u0123", - "\u0120relacion", - "\u0120komen", - "\u0120dome", - "\u0120priests", - "\u0120Introdu", - "rophe", - "shore", - "velt", - "clipse", - "\u0120\u00d1\u0122\u00d1\u0125\u00d1\u0123", - "\u00d7\u013b\u00d7\u00a1", - "\u0120sabemos", - "\u0120Holland", - "ogi", - "anki", - "\u0120Mats", - "\u0120smoked", - "ullie", - "\u0120europe", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00b9\u00d1\u0123\u00d1\u0124\u00d0\u00b2\u00d0\u00b8\u00d1\u0124\u00d0\u00b5\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be", - "\u0120bardziej", - "\u0120transforming", - "\u0120Ez", - "opath", - "\u0120\u00ec\u0138\u00b8\u00eb\u012d\u012a", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d0\u00bd", - "\u00e1\u00ba\u00b1ng", - "\u00e0\u00b8\u00b1\u00e0\u00b9\u012b", - "\u0120Ouch", - "\u0120clearance", - "ustain", - "\u0120solidarity", - "\u0120proving", - "\u0120\u00d0\u013a\u00d0\u00bd", - "\u0120\u00d1\u0123\u00d1\u012c", - "\u0120prolong", - "\u00d0\u00b0\u00d0\u00b4\u00d0\u00bd\u00d0\u00be", - "\u0120sos", - "\u0120Deal", - "\u0120170", - "mons", - "\u0120\u00d0\u00b7\u00d0\u00b5\u00d0\u00bc", - "\u0120logged", - "\u0120lifelong", - "\u0120sensory", - "\u0120behold", - "\u0120FAR", - "\u00c3\u00a8tement", - "\u0120Federation", - "\u0120dodge", - "\u0120Shir", - "\u0120dragons", - 
"\u0120Arctic", - "\u00c4\u0127\u00c5\u00bc", - "\u00c5\u012f", - "\u00c2\u00ba", - "\u0120denke", - "\u0120podr\u00c3\u0143a", - "cole", - "\u00d1\u0125\u00d0\u00bb\u00d1\u012e\u00d1\u0124\u00d0\u00b0\u00d1\u0124", - "\u0120systematic", - "\u00d0\u00b0\u00d0\u00bc\u00d0\u00b0", - "chos", - "\u0120clinics", - "\u0120BS", - "\u0120tales", - "usions", - "\u0120\u00ed\u012a\u00ac", - "\u0120preservation", - "\u0120lore", - "\u0120Protest", - "\u00e1\u00bb\u013d", - "\u00e5\u00b8\u0124", - "\u0120acknowledged", - "\u0120Isaiah", - "\u0120\u00eb\u0137\u012e\u00eb\u012c\u0136", - "\u0120\u00d7\u013a", - "\u0120competitor", - "\u0120advancing", - "zip", - "\u0120tenth", - "\u0120Laure", - "\u0120hints", - "\u0120exercising", - "\u0140\u013e\u00eb", - "\u0120Intelligence", - "uated", - "OUT", - "oped", - "\u0120autonomy", - "\u0120branding", - "\u0120Mediterranean", - "\u00d1\u0138\u00d0\u00ba", - "\u0120screwdriver", - "\u0120supre", - "\u0120stap", - "\u0120jurisdiction", - "\u0120Settings", - "\u0120forefront", - "\u0120Female", - "comfort", - "\u0120multiplication", - "\u0120Murray", - "\u0120bob", - "\u0120Tas", - "\u0120tahu", - "\u0120onun", - "etter", - "\u0120prophets", - "lag", - "\u0120revenues", - "\u0120pr\u00c3\u00a1", - "\u0120uploading", - "\u0120machinery", - "ascal", - "\u0120Est\u00c3\u00a1", - "\u0120Goth", - "\u0120Bald", - "\u0120Saw", - "\u0120stripes", - "\u00ec\u0142\u0133", - "\u0120powin", - "\u00e6\u0139\u00a5\u00e6\u013e\u00ac", - "\u0120hostile", - "\u0120darum", - "\u0120prevented", - "\u00d0\u00be\u00d0\u00b6\u00d0\u00b0\u00d0\u00bb\u00d1\u0125\u00d0\u00b9\u00d1\u0123\u00d1\u0124\u00d0\u00b0", - "\u0120algunas", - "\u0120hopeless", - "\u0120znaj", - "\u0120readings", - "\u0120craving", - "tat", - "\u0120Pig", - "\u0120liar", - "\u00e7\u012a\u00b1", - "\u0120multiplayer", - "\u0120dale", - "\u0120Course", - "\u00ed\u0123\u00bc", - "\u0120Kita", - "\u0120customs", - "\u0120responds", - "endra", - "\u00e8\u00a6\u0138", - "\u0120metro", - "\u00d1\u0123\u00d0\u00be\u00d0\u00bb", - "\u0120mitigate", - "\u0120oppression", - "\u0120\u00e6\u012a\u0133\u00e5\u0122\u0133", - "quinho", - "\u0120ammo", - "\u0120enfer", - "\u0120pony", - "\u0120ounces", - "\u00b0\u0136", - "\u0120\u00ec\u012a\u013a\u00ea\u00b0\u0122", - "\u0120dicho", - "\u0120Deb", - "\u0120wonders", - "\u0120Roose", - "\u0120prizes", - "\u0120ALEX", - "\u0120thankfully", - "\u0120tissues", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b2\u00d0\u00bd\u00d0\u00be", - "\u0120Luna", - "intelligible", - "\u0120\u00ec\u013b\u00b8", - "\u00ea\u00b0\u0133", - "\u0120Heat", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d0\u00b4", - "\u0120Qui", - "\u0120ions", - "\u0120accommodation", - "\u00e4\u00be\u00bf", - "\u0120Kart", - "ienst", - "\u0120tarde", - "\u0120soaked", - "\u0120Casey", - "\u0120\u00ec\u00b4\u013f", - "\u0120\u00d1\u0122\u00d1\u0125\u00d0\u00b1", - "\u0120differenti", - "\u0120leftover", - "\u0120exchanges", - "second", - "\u0120firstly", - "\u0120builder", - "rien", - "\u0120dw", - "\u0120bouncing", - "?<", - "olog\u00c3\u0143a", - "wealth", - "\u0120meditate", - "\u0135\u00a4\u00ec\u013f\u013a", - "\u0120Craft", - "\u00e8\u00a7\u012b\u00e5\u00be\u0139", - "\u00e6\u013b\u00ae", - "riv", - "\u0120Against", - "\u0120ceramic", - "esp\u00c3\u00a8re", - "\u0120competent", - "\u0120Hopkins", - "\u0120kilos", - "\u0120gravel", - "\u0120piston", - "\u0120friendships", - "\u0120escre", - "\u0120voz", - "\u0120Gesellschaft", - "\u0120unterst\u00c3\u00bct", - "\u0120muj", - "\u0120warnings", - "pos", - "\u0120Professional", - 
"wszy", - "odle", - "bands", - "\u0120teamwork", - "stellung", - "\u0120dx", - "\u00e5\u012f\u012c", - "\u0120attorneys", - "\u0120weitere", - "\u00e3\u0127\u012d\u00e3\u0127\u012d\u00e3\u0127\u012d", - "\u0120Original", - "\u00d7\u013b\u00d7\u0139", - "\u0120broadcasting", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d0\u00b2\u00d1\u012d\u00d0\u00b9", - "uchi", - "\u0120heure", - "\u0120grabs", - "\u0120WOR", - "\u0120Plaid", - "Min", - "\u0120paz", - "\u0120Puis", - "umu", - "itates", - "\u0120coats", - "\u0120buen", - "\u0120heir", - "\u0120pneum", - "\u00d7\u00a9\u00d7\u00a8", - "enser", - "\u0120JUDGE", - "\u0120blonde", - "\u00e1\u00b9\u013d", - "\u0120gak", - "\u0120s\u00c4\u00b1k", - "\u0120quoted", - "\u0120equipo", - "\u0120wishing", - "\u00c3\u0143cia", - "\u0120verbs", - "\u00e7\u00b5\u0126", - "\u0120Canadians", - "\u0120governing", - "\u0120Evans", - "Euro", - "\u0120genres", - "\u0120unterschied", - "\u0120Becky", - "\u00b3\u00bc\u00ea\u00b2\u012e\u00ec\u013c\u0136", - "\u0120einge", - "\u0120Raise", - "oland", - "\u0120Strateg", - "\u0120eres", - "\u0120Veterans", - "\u0120breakout", - "\u0120sant\u00c3\u00a9", - "\u0120adel", - "\u0120investigated", - "\u0120peur", - "\u0120agile", - "\u0120railroad", - "anska", - "\u0120\u00d0\u00b5\u00d0\u00b9", - "\u0120expos", - "atories", - "\u0120Content", - "\u0120truths", - "\u0120Trail", - "\u0120gua", - "\u0120pores", - "\u0120writings", - "\u0120Uhr", - "\u0120Thats", - "\u0120icing", - "OC", - "\u0120Production", - "\u0120carne", - "ISS", - "\u0120ningu\u00c3\u00a9m", - "non", - "\u0120vicious", - "\u00d7\u0137\u00d7\u0136", - "\u0120reconnect", - "\u0120centres", - "\u0120Kem", - "\u0120crease", - "\u0120\u00ec\u013f\u00b4\u00eb\u00af\u00b8", - "\u00d0\u00b0\u00d0\u00b9\u00d1\u0124\u00d0\u00b5\u00d1\u0123\u00d1\u012e", - "\u0120\u00d0\u00b1\u00d0\u00be\u00d1\u0122", - "\u0120Hay\u00c4\u00b1r", - "\u0120\u00d1\u0123\u00d1\u0125\u00d0\u00b4", - "\u0120\u00c3\u00banica", - "owa\u00c5\u0124", - "\u0120adher", - "hua", - "ZZ", - "\u0120preciso", - "\u0120currents", - "\u0120seasoned", - "\u0120IoT", - "\u0120Bishop", - "\u00e8\u00a8\u012a", - "sted", - "\u0120Bernard", - "\u00ec\u00a4\u013a", - "\u00e6\u00b2\u00bb", - "\u0120Glenn", - "\u0120kt\u00c3\u00b3rym", - "\u00e0\u00b8\u00b7\u00e0\u00b9\u012a", - "\u0120astrolog", - "\u0120Kot", - "\u00e5\u00a4\u013e", - "\u0120parfois", - "\u0120forwards", - "\u0120Wi\u00c4\u013b", - "\u0120\u00ce\u013a", - "\u0120nano", - "\u00e8\u00bb\u012f", - "sub", - "\u0120Brill", - "\u0120grit", - "\u0120cited", - "gado", - "\u0120melts", - "\u0120forc\u00c3\u00a9", - "\u00e2\u0138\u012a\u00e2\u0138\u012a", - "\u0120bajo", - "\u0120discretion", - "\u00b0\u00b0", - "ativity", - "\u0120situated", - "\u00e3\u0125\u00ab\u00e3\u0124\u00af", - "\u00d1\u012b\u00d0\u00b5\u00d0\u00b5", - "\u00e5\u013e\u00b0\u00e6\u0138\u00b9", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bd\u00d1\u0128\u00d0\u00b8\u00d0\u00bf", - "amaz", - "\u0120aquarium", - "\u0120dissolve", - "\u0120Gods", - "Super", - "\u0120amid", - "zk", - "\u0120\u00e3\u0123\u0126", - "\u00e9\u0142\u0132", - "ampf", - "\u0120hela", - "'!", - "\u0120developmental", - "\u0120Dise", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b1\u00d0\u00be\u00d1\u0124\u00d0\u00b0\u00d0\u00b5\u00d1\u0124", - "\u0120snapshot", - "\u00e5\u00a5\u00bd\u00e5\u00a5\u00bd", - "\u00d5\u00b8", - "\u0120Yue", - "\u0120Hulk", - "\u0120Doom", - "\u0120Felix", - "\u0120r\u00c3\u00a9f", - "Male", - "\u00e7\u00b7\u012c", - "phants", - "ENS", - "\u0120Mechan", - 
"\u0120Golf", - "\u00e5\u0128\u012f\u00e8\u00a6\u012d", - "\u0120generosity", - "\u00c3\u00a4tze", - "\u0120unlocked", - "\u0120\u00e3\u0124\u0134", - "\u00ed\u0125\u0123", - "ocalypse", - "Alright", - "\u0120\u00ea\u00b0\u013e\u00eb", - "\u0120\u00d7\u0132\u00d7\u0133\u00d7\u013e", - "\u0120Keeping", - "\u0120collaborating", - "chief", - "\u0120Fernando", - "\u0120chefs", - "\u0120\u00ed\u0136\u00bc\u00eb\u00b6\u0122", - "\u0120skipped", - "\u0120personn", - "\u0120axe", - "chez", - "\u0120extraction", - "\u0120AV", - "\u0120Gibbs", - "\u0120\u00ed\u013e", - "\u0120s\u00c4\u00b1", - "IAM", - "View", - "\u0120GRANT", - "\u0120\u00eb\u00aa\u00b8", - "\u0120verification", - "\u0120depicted", - "\u0120Moz", - "oux", - "\u0120tul", - "\u0120scanner", - "\u0120comedian", - "\u0120Volks", - "\u0120JEFF", - "\u00e8\u00a8\u0124\u00e9\u0138\u00b1", - "\u00a7\u0126", - "\u0120distraction", - "r\u00c3\u00a1", - "\u0120INTER", - "\u0120sincer", - "\u0120\u00d7\u0140\u00d7\u00aa", - "\u0120\u00d7\u00a9\u00d7\u0142", - "\u0120constructive", - "arf", - "\u0120\u00eb\u012a\u0126\u00eb", - "\u0120eco", - "ramos", - "\u0120renewed", - "inement", - "\u0120Ub", - "\u0120Pepper", - "\u00ec\u00a7\u0122\u00ea\u00b0\u0122", - "\u0120Darwin", - "\u0120merchand", - "\u0120v\u00c3\u00a1rias", - "\u00c3\u00a8ce", - "NG", - "\u0120\u00ec\u013e\u0126\u00ed\u0137\u00b4\u00ec\u0126\u013e", - "\u0120\u00d0\u00b0\u00d0\u00ba\u00d1\u0124\u00d0\u00b8\u00d0\u00b2", - "\u0120Unters", - "\u00d8\u00b9\u00d9\u0126", - "\u0120intric", - "omma", - "ieving", - "\u0120Caroline", - "\u00e5\u0135\u0123", - "\u0120PRES", - "\u0120performer", - "\u0120autour", - "\u00e3\u0123\u00be\u00e3\u0123\u013d\u00e3\u0124\u0135", - "\u0120utterly", - "\u0120synthesis", - "\u0120lesbian", - "\u0120retrieve", - "\u0120maneira", - "\u0120impair", - "\u0120mentoring", - "\u0120Souls", - "\u0120GoPro", - "\u00d1\u0122\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "\u0120cose", - "\u0120SSD", - "IRE", - "\u0120upfront", - "\u0120Aun", - "\u0120gamer", - "\u0120litt", - "\u0120aggression", - "\u0120Likewise", - "\u0120Betty", - "\u0120Dart", - "\u0120DLC", - "ishment", - "\u00ec\u0140\u00a5\u00ec\u013f\u0126", - "\u0120\u00e5\u00af\u00b9", - "\u00e7\u00bb\u0131", - "cream", - "\u0120Babylon", - "\u0120nug", - "brar", - "\u0120ayn\u00c4\u00b1", - "amily", - "bike", - "ahahaha", - "loyd", - "\u0120mira", - "\u0120perme", - "\u0120Gaming", - "\u0120firmware", - "Ma", - "\u0120assisted", - "atics", - "\u0120\u00ec\u0137\u0140\u00ec\u013e\u00bc\u00eb\u00a1\u013e", - "\u0120Mental", - "niejs", - "\u0120Iz", - "ow\u00c4\u0127", - "\u0120tougher", - "\u0120deed", - "\u00e8\u012d\u00a6", - "\u0120stylish", - "\u0120Tools", - "\u0120Hamp", - "\u0120sunscreen", - "\u0120articulate", - "iye", - "\u00d0\u00b8\u00d1\u0126", - "\u0120Spread", - "\u0120HAVE", - "\u0120swirl", - "\u0120sponsoring", - "\u00e4\u00bb\u012d", - "iovascular", - "mesi", - "\u0120relaxation", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d0\u00be\u00d0\u00b8\u00d1\u0127", - "\u0120margins", - "\u0120sa\u00c4\u0141", - "\u0120Pride", - "\u0120\u00cf\u0126\u00ce\u00bf\u00cf\u0127\u00cf\u0124", - "\u00d0\u00b8\u00d1\u0128\u00d0\u00b8", - "enci", - "Does", - "\u0120corpse", - "\u0120endurance", - "\u0120\u00ed\u0140\u013a", - "\u00ec\u00b9\u00b4", - "\u0120haircut", - "\u0120interrupted", - "\u0120windy", - "\u0120Caleb", - "\u00cf\u0123\u00cf\u0129", - "\u0120Pourquoi", - "\u0120holistic", - "uclear", - "\u0120Whole", - "\u00e5\u00a3\u00ab", - "Act", - "\u0120gallon", - "cade", - "\u0120Regional", - "roads", - 
"\u0120Schne", - "\u00c3\u00a1ng", - "\u0120\u00d0\u00b8\u00d0\u00b7\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd", - "\u00e3\u0124\u012a\u00e3\u0123\u0143", - "\u0120menus", - "\u0120splitting", - "\u0120priced", - "\u0120\u00ce\u0135", - "\u0120username", - "\u0120\u00d0\u0140\u00d1\u0129", - "\u0120compressed", - "yin", - "\u0120guardian", - "\u0120goof", - "\u0120checklist", - "\u0120interchange", - "\u0120expedition", - "\u0120extern", - "\u0120infrared", - "engo", - "\u0120denying", - "\u0120packets", - "onent", - "BB", - "\u0120Incre", - "\u0120sini", - "\u00c3\u0141er", - "\u00c3\u00a8g", - "maal", - "generation", - "\u0120minorities", - "\u0120llevar", - "\u0120nomination", - "\u0120consid", - "\u0120\u00d7\u013e\u00d7\u00a2", - "mu\u00c5\u0141", - "\u0120Esc", - "\u0120numerator", - "\u0120kaik", - "\u0120kt\u00c3\u00b3rych", - "iesen", - "\u0120v\u00c3\u00aa", - "\u0120USS", - "\u0120Private", - "\u0120\u00d0\u00be\u00d0\u00b4\u00d0\u00bd\u00d0\u00be", - "\u0120al\u00c3\u00a9m", - "\u00c3\u0143tulo", - "\u0120limb", - "\u0120forgiven", - "\u0120disclosure", - "\u00cf\u0126\u00ce\u00af", - "\u0120ning\u00c3\u00ban", - "\u0120therapeutic", - "\u0120negotiating", - "\u0120Nike", - "enseful", - "\u0120incap", - "\u0120flagship", - "town", - "\u00e2\u012a", - "\u0120\u00cf\u0122\u00ce\u00bf\u00ce\u00bb", - "\u0120wolves", - "\u0120violations", - "\u0120Arnold", - "\u0120intervene", - "\u0120heater", - "\u0120recursos", - "\u0120maid", - "\u00ea\u00b2\u00bc", - "\u0120\u00d0\u00b4\u00d0\u00b0\u00d0\u00b2\u00d0\u00b0\u00d0\u00b9\u00d1\u0124\u00d0\u00b5", - "\u0120Celebr", - "\u0120cape", - "\u0120Sty", - "ainen", - "site", - "bij", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00b7", - "\u0120framed", - "\u0120publishers", - "\u0120\u00d1\u0129\u00d1\u0125\u00d1\u0124\u00d1\u012e", - "\u0120temptation", - "\u0120certeza", - "\u0120exempt", - "\u00ec\u012c\u00b9", - "selling", - "\u0120Task", - "hoon", - "\u0120Coc", - "\u0120Parks", - "\u0120repetition", - "\u0120\u00d1\u0124\u00d1\u0125\u00d0\u00b4\u00d0\u00b0", - "\u0120ensl", - "\u0120de\u00c4\u0141i\u00c5\u0141", - "\u0120Orlando", - "\u0120Mainten", - "\u00e6\u0143\u00a2", - "ocument", - "\u0120HC", - "\u0120scooter", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00bf\u00d0\u00b8\u00d1\u0123", - "\u0120tighter", - "\u0120tease", - "\u0120removes", - "\u0120kijken", - "\u0120\u00d1\u0123\u00d1\u0125\u00d1\u012b\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d0\u00b2", - "\u0120th\u00c3\u00a9", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d0\u00b3\u00d0\u00bb\u00d1\u0131\u00d0\u00b4", - "\u0120relieve", - "\u0120mit\u00c3\u00a4", - "\u0120stationary", - "\u00c3\u00b6ff", - "pable", - "\u0120arter", - "\u0120d\u00c3\u00a9f", - "rative", - "\u0120conect", - "\u0120saddle", - "\u0120Diane", - "\u0120commemor", - "fendim", - "S\u00c3\u0143", - "\u0120\u00ed\u0123\u00b4\u00eb", - "\u0120mange", - "atte", - "\u0120arrogant", - "\u0120robotic", - "\u0120gi\u00c3\u0142", - "\u00e6\u013a\u00af\u00e7\u013c\u0126", - "\u0120neighbourhood", - "isson", - "\u0120\u00d0\u00b4\u00d0\u00b2\u00d0\u00b8\u00d0\u00b6", - "\u0120RI", - "\u0120Norman", - "brand", - "amation", - "\u0120razor", - "\u0120murders", - "\u0120\u00d1\u0124\u00d1\u0125", - "\u0120wszystkim", - "\u0120utilities", - "\u0120microscop", - "\u00ea\u00bf", - "\u0120daqui", - "ollar", - "\u0120\u00d0\u0136\u00d0\u00b0\u00d0\u00b2\u00d0\u00b0\u00d0\u00b9\u00d1\u0124\u00d0\u00b5", - "\u0120ann\u00c3\u00a9e", - "\u0120kilometres", - "\u0120homosexual", - "\u0120architects", - 
"\u00e3\u0123\u00a1\u00e3\u0123\u00af", - "\u0120niye", - "LER", - "\u0120microphones", - "\u0120Stunden", - "\u0120consecutive", - "ienda", - "v\u00c3\u00a4nd", - "DER", - "\u0120lifts", - "\u0120Meat", - "\u0120savez", - "\u00ed\u0138\u012a\u00eb\u012f\u013a", - "Men", - "\u0120dismant", - "\u00ea\u00b1\u00b0\u00eb\u00a5\u00bc", - "\u0120insulation", - "\u0120scall", - "\u0120spooky", - "\u0120parc", - "\u0120ballet", - "\u0120WhatsApp", - "\u0120franc", - "\u0120deliberate", - "\u0120\u00ed\u0127\u012e", - "\u0120mars", - "\u0120Zur", - "Pr", - "disciplinary", - "\u0120obsession", - "\u00d0\u00bc\u00d0\u00b5", - "\u0120marching", - "\u0120Emergency", - "iguous", - "\u0120szy", - "\u0120Lands", - "\u0120boarding", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0129\u00d1\u0124\u00d0\u00b8", - "\u0120envy", - "\u0120compassionate", - "\u0120merci", - "\u0120desirable", - "dale", - "\u0120can\u00c4\u00b1m", - "\u0120Antar", - "temps", - "\u0120configured", - "\u0120Compared", - "neh", - "icating", - "\u0120nickel", - "\u00d9\u012a\u00d9\u0124", - "\u00d9\u0125\u00d9\u012a\u00d9\u0128", - "opes", - "\u0120formulas", - "\u0120\u00d0\u0137\u00d1\u0123\u00d1\u0124\u00d1\u012e", - "\u0120pobl", - "\u0120PJ", - "\u0120Lud", - "\u00e4\u00bb\u012c\u00e5\u013d\u0140", - "\u0120Brid", - "\u0120Hog", - "\u0120Bris", - "Jen", - "\u0120shading", - "\u0120Yas", - "\u0120disturbed", - "\u0120recommending", - "\u0120c\u00c3\u00a9", - "\u0120HOW", - "\u00ec\u0139\u012a\u00ec\u0138\u00b4", - "\u0120reversed", - "\u0120Interestingly", - "ioxid", - "\u00e5\u0127\u0143", - "\u0120\u00ec\u013a\u00a4\u00ec\u00bc\u0122\u00ec\u013f\u00b4", - "\u00e1\u00ba\u00bfu", - "xx", - "\u0120ouais", - "\u0120YouTubers", - "\u0120Rosa", - "\u0120Haupt", - "jadi", - "\u0120vlogs", - "\u0120cultura", - "\u0120Leadership", - "\u0120Hep", - "\u0120illum", - "\u00b4\u00eb\u0131\u013b", - "\u0120customized", - "\u0120marca", - "\u0120quatro", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b3", - "\u0120SpaceX", - "\u0120Eigen", - "asting", - "\u0120oldu\u00c4\u0141u", - "\u0120forts", - "\u00e3\u0123\u012b", - "riment", - "iencia", - "\u0120tenir", - "roffen", - "\u01201979", - "\u0120cie", - "\u0120\u00eb\u0132\u013a\u00ea\u00b3\u0142", - "\u0120escri", - "\u00cf\u012e\u00cf\u0124", - "\u00ed\u0131\u00ac", - "uzzy", - "Cong", - "\u00ec\u013f\u00b8\u00ec\u013f\u00b4", - "Great", - "sil", - "\u00c3\u00a9ch", - "\u00e3\u0123\u00a8\u00e3\u0123\u012d", - "\u0120multic", - "\u0120Disk", - "\u00b2\u0137", - "\u0120fazla", - "\u0120levant", - "\u0120abajo", - "urry", - "stru", - "\u0120\u00eb\u00a8\u00b9\u00eb\u012c\u0136", - "\u0120accessory", - "\u0120\u00d0\u00b4\u00d0\u00b2\u00d0\u00b8\u00d0\u00b3", - "\u0120Rid", - "2019", - "\u0120downstream", - "\u00e6\u0137\u00b8", - "\u0120kaz", - "utan", - "\u0120charcoal", - "\u0120afect", - "wu", - "\u0120contexts", - "\u0120feared", - "\u0120\u00ec\u0126\u00a4", - "\u0120histories", - "\u0120fas", - "ensible", - "\u0120cocoa", - "illar", - "geons", - "\u0120spirituality", - "\u0120Pew", - "\u0120pharmacy", - "\u0120passions", - "\u0120bos", - "\u0120all\u00c3\u00a1", - "\u0120thriving", - "\u0120React", - "\u0120occupy", - "\u0120withdrawal", - "\u0120allowance", - "\u0120Fraktion", - "\u0120buddies", - "\u0120idle", - "\u0120dissolved", - "\u0120prevalent", - "\u0120militar", - "\u0120sensing", - "\u0120pojaw", - "\u0120ancora", - "\u0120abundant", - "\u0120hairst", - "\u00e3\u0123\u0124\u00e3\u0124\u012e", - "\u0120twee", - "\u0120n\u00c3\u00a4chste", - "\u0120M\u00c3\u00b6glichkeit", - "\u0120hoo", 
- "ufficient", - "\u0120fantast", - "\u0120edible", - "\u0120\u00eb\u0138\u00a8\u00ec\u0138\u00b4\u00ec", - "\u00ec\u013d\u0125", - "\u0120vein", - "ucci", - "\u0120devotion", - "\u0120concealer", - "income", - "\u0120recycled", - "\u0120\u00ec\u012c\u00a4\u00ed\u0125\u0122", - "\u0120pontos", - "\u0120dessus", - "\u0120v\u00c3\u00a9rit", - "\u0120reflections", - "\u0120AA", - "\u0120takeaway", - "bare", - "\u0120Contact", - "eil", - "\u0120Hear", - "\u0120mirac", - "\u0120Gerilim", - "\u0120\u00d1\u0123\u00d0\u00b0\u00d0\u00bc\u00d1\u012d\u00d0\u00b9", - "\u0120vivo", - "\u0120kilograms", - "\u0120Crim", - "\u00c3\u00bbt", - "78", - "\u0120sincerely", - "raz", - "\u0120\u00eb\u00b3\u00b5", - "\u0120arriv", - "\u0120conception", - "\u0120Persian", - "\u0120sj\u00c3\u00a4l", - "\u0120starring", - "\u0120\u00ec\u0137\u0126\u00eb\u00ac\u00b4", - "\u0120Forever", - "\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d1\u012e", - "\u0120veil", - "\u0120subtit", - "odka", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00bd\u00d0\u00be\u00d1\u012a", - "\u0120cooks", - "\u00d0\u00b5\u00d0\u00bd\u00d1\u0131", - "Kay", - "\u0120ni\u00c3\u00b1os", - "\u0120Phone", - "\u0120stitching", - "\u0120fingerprint", - "\u00e9\u00a2\u013a", - "\u00ce\u00bb\u00ce\u00ac", - "\u0120dedicate", - "\u0120Lob", - "\u0120blacks", - "\u0120Ble", - "bout", - "\u0120\u00c4\u0133ang", - "\u0120eks", - "\u0120squash", - "\u0120K\u00c3\u00bc", - "odi", - "\u0120n\u00c6\u00b0\u00e1\u00bb\u013dc", - "\u0120voyage", - "\u0120playful", - "\u0120\u00d8\u00a5\u00d9\u0126\u00d9\u012b", - "anic", - "\u0120condemn", - "\u0120B\u00c3\u00b6yle", - "\u0120Polize", - "\u00e3\u0124\u00bf\u00e3\u0125\u00bc", - "\u0120ayuda", - "\u0120pam", - "\u00e0\u00b9\u0126\u00e0\u00b8\u013d", - "\u0120Kathy", - "\u00d0\u00b5\u00d0\u00b4\u00d0\u00b8\u00d0\u00bd", - "\u00d0\u00bd\u00d0\u00be\u00d0\u00b2\u00d0\u00b0", - "\u0120brig", - "eger", - "\u0120eagle", - "\u0120visions", - "\u0120\u00ed\u0137\u0143\u00ec\u0125\u0123", - "\u0120shitty", - "\u0120hott", - "\u0120Britt", - "utors", - "ENTE", - "\u00e6\u013d\u00b2", - "\u0120phon", - "\u0120Bing", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b4\u00d0\u00b4\u00d0\u00b5\u00d1\u0122\u00d0\u00b6", - "spring", - "\u00e6\u0138\u00af", - "etten", - "\u0120pilgr", - "\u0120ediyor", - "\u00d0\u00b5\u00d0\u00bd\u00d1\u0124\u00d1\u012d", - "aggio", - "\u0120jul", - "\u0120comprend", - "teil", - "\u0120\u00d8\u00b2", - "\u0120performers", - "\u0120infamous", - "\u0120MK", - "\u00e7\u00aa", - "\u00e6\u00b3\u0123", - "otle", - "eff", - "\u0120Hash", - "\u0120coward", - "\u0120BRA", - "\u0120DD", - "\u0120comida", - "\u0120plata", - "\u0120flap", - "\u0120Mehr", - "ribution", - "\u0120Yemen", - "\u0120mysteries", - "\u0120\u00c4\u00b0yi", - "\u0120stell", - "\u0120eyeliner", - "\u0120deles", - "\u0120nailed", - "\u0120illnesses", - "\u0120stacks", - "\u0120trabajar", - "flower", - "ciu", - "\u0120crude", - "\u0120substantially", - "\u0120homem", - "\u0120nephew", - "\u0120stamps", - "\u0120carbs", - "\u00d1\u012e\u00d1\u0124\u00d0\u00b5", - "mooth", - "\u0120tunnels", - "acie", - "\u00e6\u00b3\u00a2", - "\u0120Se\u00c3\u00b1", - "\u0120Hera", - "\u0120\u00ec\u0137\u0126\u00eb\u012d\u012a\u00ec\u0139\u0132\u00ec\u013c\u0136", - "\u0120Wyoming", - "\u0120HDMI", - "\u0120Lis", - "uci\u00c3\u00b3n", - "\u0120steer", - "\u00d0\u00be\u00d1\u0130", - "\u00d0\u00b8\u00d1\u0124\u00d0\u00b0", - "NT", - "\u0120\u00ec\u0138\u00bc\u00ea\u00b5\u00b4", - "\u0120palms", - "\u0120neon", - 
"\u00d0\u00be\u00d0\u00b2\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d1\u0131", - "\u0120filtering", - "\u0120jouer", - "\u0120H\u00c3\u00b6", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d1\u0123", - "\u00ea\u00b2\u0142\u00ec\u0138\u00b4\u00ec\u013c\u0136", - "\u012081", - "\u0120storyline", - "\u0120przep", - "\u0120thanking", - "\u0120Boeing", - "\u0120softly", - "jem", - "\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d1\u012d\u00d1\u0127", - "\u0120flashlight", - "\u0120\u00d0\u00bf\u00d1\u0125", - "\u0120WOMAN", - "\u00e1\u00ba\u00afc", - "\u00c3\u0143ch", - "\u0120luxurious", - "\u0120w\u00c3\u00bcn", - "\u0120impactful", - "\u0120conson", - "reu", - "irring", - "ifter", - "\u0120constituents", - "\u00e8\u0132\u00bd", - "\u012094", - "\u0120Tou", - "gom", - "\u0120\u00ec\u0125\u013f\u00ea\u00b0\u0123\u00ec\u013f\u0126", - "\u0120stereotypes", - "\u0120mo\u00c5\u00bcli", - "\u00e5\u012a\u0128\u00e4\u00ba\u00ab", - "\u0124\u00a8", - "\u0120pencils", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d0\u00be\u00d0\u00b6", - "\u0120ihrem", - "\u0120Besch", - "\u0120Koh", - "\u0120Entscheid", - "\u0120lek", - "\u0120f\u00c3\u00b6rs", - "\u0120totalmente", - "\u0120lively", - "\u0120entropy", - "\u0120discern", - "\u0120\u00d0\u0139\u00d0\u00bd\u00d0\u00b0", - "\u0120dov", - "\u0120mythology", - "\u00e8\u00a8\u013a\u00e5\u00be\u0139", - "apanese", - "\u0120approximate", - "\u00d0\u00b0\u00d1\u0124\u00d0\u00b8\u00d0\u00b2", - "ifiable", - "\u0120Seo", - "\u00e5\u0122\u0134", - "\u00b4\u00ec\u012d\u00ac\u00ed\u0140\u012a", - "\u0120\u00ec\u013a\u00b7", - "\u0120temporal", - "\u0120iT", - "\u0120estat", - "\u00d0\u00ba\u00d0\u00b8\u00d0\u00bc", - "\u0120sprink", - "\u0120grund", - "\u0120infantry", - "\u0120schaffen", - "\u00e7\u00b4\u0126", - "\u0120ank", - "riages", - "\u0120Yeon", - "\u0120Moroc", - "\u0120invasive", - "\u0123\u0136", - "\u0120parenting", - "\u0120Ris", - "ibile", - "\u0120mods", - "\u00e5\u00bd\u00a2", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b2\u00d0\u00b5\u00d1\u0122", - "\u0120Thing", - "\u0120Wherever", - "\u0120acknowledging", - "\u0120pawn", - "ummer", - "orb", - "69", - "\u0120retrouve", - "\u0120relies", - "\u0120Highway", - "\u0120awe", - "\u00e3\u0123\u00a7\u00e3\u0123\u013b\u00e3\u0123\u012d", - "itaire", - "\u0120applicant", - "\u0120aisle", - "worm", - "\u0120payload", - "\u0120carre", - "\u0120Bach", - "\u00e6\u0142\u00bc", - "\u0120\u00ec\u00b9\u013e\u00ea\u00b5\u00ac\u00eb", - "\u00d0\u00bd\u00d0\u00b8\u00d0\u00b5", - "\u0120it\u00c3\u0143s", - "onnaise", - "sol", - "\u00e8\u0131\u00af", - "algia", - "\u0120rocking", - "\u0120besten", - "rites", - "^^", - "\u00d0\u00b8\u00d0\u00bd\u00d0\u00be\u00d0\u00b9", - "\u0120baixo", - "\u0120\u00ea\u00b8\u00b0\u00ec\u0138\u00b5", - "\u00d0\u00be\u00d1\u0124\u00d1\u0122\u00d0\u00b8", - "sim", - "\u0120incarn", - "\u00eb\u012d\u00a4\u00ec\u013f\u012e", - "\u0120lick", - "sided", - "\u012071", - "forder", - "\u0120resonance", - "\u0120tegen", - "\u0120metaph", - "owser", - "\u0120\u00d7\u0132\u00d7\u0142\u00d7\u0139\u00d7\u0142\u00d7\u0137", - "?\u00e3\u0122\u012f", - "\u0120spielen", - "\u0120volley", - "\u0136\u00ec\u013f\u00b4\u00ed\u0123\u00ac\u00ec\u0139\u0127", - "looked", - "\u0120sentenced", - "\u0120multiplying", - "\u0120ideals", - "\u0120wahrscheinlich", - "\u0120deposits", - "bilir", - "\u0120effet", - "illon", - "\u012a\u00eb\u00a7\u012e", - "\u0120testimon", - "\u0120zawsze", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u0128\u00d0\u00b5\u00d1\u0123\u00d1\u0123", - "\u0120Lav", - 
"\u00e4\u00b8\u012f\u00e9\u012e\u00af", - "\u0120travailler", - "\u0120laisse", - "\u0120Mountains", - "\u0120\u00d1\u0122\u00d0\u00be\u00d0\u00b1", - "\u0120examined", - "itus", - "Was", - "\u00d0\u00bb\u00d1\u012d", - "\u0120attributed", - "\u0120\u00ec\u012c\u00b9", - "\u0120Baron", - "\u0120gep", - "\u0120attent", - "\u0120Collection", - "\u0120theat", - "\u0120Cai", - "\u0120wells", - "\u0120humano", - "\u00e7\u0139\u0127", - "\u0120Hast", - "\u0120\u00d1\u0127\u00d0\u00be\u00d1\u0124\u00d1\u0131", - "czas", - "\u0120permits", - "\u0120legg", - "\u0120epo", - "\u0120Fen", - "\u0120thi", - "\u0120Foi", - "\u0120\u00c3\u00a9lect", - "\u012083", - "\u0120overth", - "\u0120\u00e8\u00ac\u013f\u00e8\u00ac\u013f", - "\u0120tenant", - "\u00e8\u00b2\u00b7", - "Next", - "\u0120praised", - "security", - "\u0120Impact", - "\u00e4\u00b8\u00ba\u00e4\u00bb\u0122\u00e4\u00b9\u012a", - "\u0120vouch", - "\u0120neg\u00c3\u00b3", - "\u0120unve", - "\u0120criticize", - "\u0120Kenya", - "\u0120tactic", - "\u0120logr", - "\u0120pois", - "\u0120papa", - "speaks", - "\u00f0\u0141\u0133", - "ispers", - "\u0120surplus", - "\u0120colder", - "\u00e5\u012f\u0139", - "\u00e5\u0132\u00ac", - "plets", - "\u0120Vienna", - "\u0120Lead", - "\u0120aerial", - "\u0120Tah", - "\u00d0\u00b5\u00d0\u00bd\u00d1\u0124\u00d0\u00be\u00d0\u00b2", - "\u0120Greeks", - "Cam", - "\u0120m\u00c3\u00a1xim", - "\u0120kuin", - "chio", - "\u0120demonstrates", - "anos", - "\u0120Cert", - "\u0120\u00d1\u012f\u00d0\u00bd", - "\u0120blogs", - "\u0120\u00ec\u0126\u013e\u00ec\u013c\u00b8", - "\u0120beams", - "\u00d0\u00b8\u00d0\u00ba\u00d0\u00be\u00d0\u00b2", - "\u0120prompted", - "\u0120frightening", - "\u0120Porsche", - "\u00e3\u0123\u012a\u00e3\u0123\u00a6", - "lar\u00c4\u00b1n\u00c4\u00b1", - "\u0120chilling", - "isphere", - "\u0120flashing", - "\u0120Kard", - "bread", - "\u0120exh", - "\u0120tycker", - "\u0120ecological", - "\u0120Mae", - "\u0120\u00d7\u0140\u00d7\u0132\u00d7\u0137\u00d7\u0135", - "\u0120\u00eb\u0124\u013a\u00eb\u0131\u0126", - "\u00d0\u00bb\u00d0\u00be\u00d0\u00bd", - "yss", - "\u0120pergunt", - "\u0120prix", - "izzard", - "\u0120cancers", - "\u012091", - "susp", - "\u0120Item", - "\u00c5\u0141a", - "\u0120pest", - "\u0120tak\u00c4\u0127", - "\u0120lymph", - "\u0120Patri", - "fill", - "\u0120reconna", - "\u0120optimism", - "\u0120mimic", - "\u0120\u00ec\u00b2\u013e", - "\u0120Madame", - "ocy", - "lining", - "\u00e5\u0133\u012c\u00e8\u00a8\u00b4", - "erme", - "\u0120folders", - "\u0120cz\u00c5\u0124", - "uchar", - "\u0120curso", - "\u0120breach", - "\u00d0\u00bd\u00d0\u00b8\u00d1\u0124\u00d1\u012e", - "\u0120pami\u00c4\u013b", - "\u0120elig", - "\u0120autop", - "Flow", - "\u0120programmed", - "\u0120Process", - "\u0120figur", - "\u0120SF", - "\u0120Eles", - "\u0120programmes", - "\u0120dizzy", - "\u00ec\u012d\u013e\u00ea\u00b0\u0126", - "\u0120\u00d0\u00bb\u00d0\u00b8\u00d0\u00b1\u00d0\u00be", - "\u0120sniff", - "\u0120Sebastian", - "\u0120Hye", - "\u01204000", - "\u0120permite", - "\u00e6\u00a2\u013f", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d1\u012b", - "\u0120guit", - "\u0120Dais", - "\u0120accordance", - "\u0120modular", - "ogeneous", - "\u00e6\u012d\u012f", - "\u0120pouquinho", - "\u0120artillery", - "\u0120lubric", - "\u0120volcan", - "\u0120NH", - "\u00f0\u0141\u00a4", - "\u0120dean", - "Rh", - "\u0120ministre", - "\u00e5\u013f\u0132", - "\u0120Inv", - "\u0120Bulgar", - "\u0120Daten", - "\u00e8\u0130", - "Im", - "\u0120originated", - "\u0120Nixon", - "integr", - "\u0120lacks", - "\u0120Nacht", - 
"\u00ec\u0138\u00b4\u00eb\u0124\u013a", - "camera", - "\u0120radish", - "kiye", - "\u0120anges", - "\u0120pr\u00c3\u00a9f", - "juk", - "\u0120Bee", - "\u0120BU", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d1\u0123\u00d0\u00bf", - "\u0120BT", - "\u00c3\u00aames", - "\u0120St\u00c3\u00bcck", - "\u0120Ink", - "\u00e6\u012a\u0138\u00e8\u0122\u0127", - "\u0120Sergeant", - "\u0120Multip", - "\u0120hi\u00c3\u00a7bir", - "\u0120\u00d0\u00a1\u00d0\u00b0\u00d0\u00bc", - "\u0120D\u00c3\u00a9", - "olph", - "\u00ec\u0138\u00b8", - "\u0120impat", - "\u0120\u00ec\u0137\u012c\u00ea\u00b3\u0142", - "\u0120\u00d1\u0124\u00d0\u00b0\u00d0\u00ba\u00d0\u00be\u00d0\u00b3\u00d0\u00be", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b2\u00d0\u00b5\u00d1\u0122\u00d0\u00bd\u00d0\u00be\u00d0\u00b5", - "\u0120unpredictable", - "\u0120mend", - "\u0120\u00ec\u0139\u0128\u00ec\u0138\u00b4\u00ec\u013c\u0136", - "\u0120jakie\u00c5\u013d", - "\u0120anni", - "\u0120donn\u00c3\u00a9", - "\u0120Kirsty", - "\u0120rectangular", - "\u0120empezar", - "\u0120Exchange", - "\u00ea\u00b0\u0136", - "\u0120\u00c3\u00a9conom", - "\u00e3\u0123\u0135\u00e3\u0124\u0135", - "elin", - "reibt", - "\u0120\u00d7\u0136\u00d7\u00a4", - "\u0120cemetery", - "\u0120espa\u00c3\u00b1ol", - "olin", - "\u00d0\u00bb\u00d1\u0130\u00d0\u00b4", - "\u0120gr\u00c3\u00a2ce", - "allen", - "\u0120Philos", - "\u0120Erst", - "\u0120\u00ec\u0125\u012a", - "\u0120Vid", - "Give", - "OH", - "\u00ce\u00bc\u00ce\u00bf", - "\u0120Pare", - "\u0120metabolism", - "\u0120maple", - "\u0120axle", - "\u0120Dy", - "\u0120komme", - "\u00cf\u0130\u00ce\u00bd", - "\u0120greatness", - "\u0120verified", - "\u0120sp\u00c3\u00a9", - "\u0120Fahrenheit", - "\u0120Bren", - "\u0120Confeder", - "\u0120histoire", - "\u0120eliminating", - "\u0120Adding", - "\u0120Abi", - "\u00e6\u013f\u0130", - "\u0120hospitality", - "tim", - "\u0120bonito", - "\u0120partes", - "\u0120\u00d0\u00b4\u00d1\u0122\u00d1\u0125\u00d0\u00b3\u00d0\u00b8\u00d1\u0127", - "\u0120Shay", - "\u0120Sed", - "\u0120regrets", - "\u00d1\u0131\u00d0\u00bc\u00d0\u00b8", - "\u0120tenants", - "\u00e9\u0122\u0141", - "\u0120PTS", - "\u0120devi", - "\u0120Late", - "uez", - "\u0120s\u00c3\u00b6yl", - "\u00e3\u0124\u00bb", - "\u0120\u00ec\u0140\u00ac\u00eb\u00b0\u012e", - "\u0120toggle", - "\u0120masking", - "\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be\u00d0\u00b3\u00d0\u00be", - "\u0120pers\u00c3\u00b6n", - "\u0120american", - "fik", - "\u0120RGB", - "enson", - "\u0120KA", - "wwww", - "\u0120\u00d1\u0122\u00d0\u00b5\u00d0\u00b3", - "metics", - "\u0120educator", - "\u00e3\u0124\u00b7\u00e3\u0125\u00ab\u00e3\u0124\u00af", - "park", - "\u00d0\u00b5\u00d0\u00bb\u00d1\u012e\u00d0\u00b7\u00d1\u0131", - "arus", - "\u00d1\u0122\u00d0\u00b5\u00d1\u0124", - "\u0120feito", - "\u0120choir", - "\u0120largo", - "\u0120eens", - "\u0120watts", - "\u0120Single", - "\u0120susceptible", - "icer", - "\u0120\u00d0\u00b2\u00d0\u00ba\u00d0\u00bb\u00d1\u0130\u00d1\u0129", - "\u0120pus", - "\u00ed\u013b\u013a", - "Eng", - "\u0120fantas", - "\u0120specification", - "\u0120confronted", - "\u0120Columbus", - "\u00d0\u00b8\u00d0\u00b2\u00d0\u00b5\u00d1\u0124", - "ar\u00c4\u00b1m", - "\u0120caffeine", - "munition", - "\u0120migrants", - "lide", - "itations", - "\u0120Geme", - "\u00e1\u00ba\u00ab", - "\u0120planner", - "\u0120stimulate", - "\u0120aproxim", - "ceu", - "\u0120Nom", - "\u0120vog", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d1\u0124", - "\u0120ense\u00c3\u00b1", - "\u0120sellers", - "\u0120guten", - "zd", - "Cal", - "\u0120descript", - 
"\u0120reconciliation", - "zinho", - "\u00e1\u00b9\u0129a", - "\u00e3\u0123\u013a\u00e3\u0124\u0125\u00e3\u0123\u0124", - "acyj", - "\u0120COL", - "saw", - "\u0120\u00ed\u013b\u0137\u00ec\u013f\u00b8", - "\u0120varit", - "\u0120partnering", - "\u0120detention", - "\u0120bombing", - "clapping", - "iencies", - "ondu", - "AME", - "\u0120\u00ea\u00b0\u013b\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "c\u00c3\u0143a", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00be", - "\u0120ASMR", - "\u0120homepage", - "\u0120si\u00c3\u00a8", - "antha", - "\u0120Poll", - "\u0120igen", - "cych", - "\u0120\u00ea\u00b0\u0133\u00ec\u0140\u0132\u00ea\u00b8\u00b0", - "\u0120considerably", - "\u00e4\u00bb\u0138\u00e7\u013c\u0126", - "\u0120Arist", - "\u0120withstand", - "\u0120qualitative", - "\u0120Kraft", - "\u0120\u00d1\u012f\u00d0\u00bb\u00d0\u00b5\u00d0\u00ba\u00d1\u0124", - "\u0120Bead", - "\u00d0\u00b5\u00d0\u00ba\u00d1\u0124\u00d0\u00b8\u00d0\u00b2", - "\u0120crushing", - "\u00ec\u00b3\u0132", - "\u0120navy", - "\u00d9\u012a\u00da\u00ba", - "sho", - "\u0120oak", - "ippers", - "\u0120soils", - "\u0120pigment", - "\u0120evitar", - "\u00e3\u0125\u0129", - "\u0120fuse", - "\u0120Dale", - ":\"", - "\u0120compl\u00c3\u00a8tement", - "\u0120kel", - "\u00e0\u00b9\u0128", - "\u0120quatre", - "\u0120UM", - "\u0120\u00eb\u00a7\u0132\u00eb", - "\u00e6\u0142\u00b9", - "\u00c3\u0143r", - "\u0120leisure", - "\u0120Housing", - "\u0120folds", - "estion", - "ARS", - "\u0120mash", - "urpose", - "\u0120accumulated", - "\u0120Stuff", - "\u00e8\u00aa\u0140", - "\u0120tapes", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be", - "\u0120LOVE", - "\u01201982", - "\u0120scars", - "\u0120capitalist", - "\u0120Ned", - "\u0120soften", - "\u0120notably", - "\u0120forc\u00c3\u00a9ment", - "\u0120Raum", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00be\u00d0\u00b1\u00d1\u0127\u00d0\u00be\u00d0\u00b4", - "\u0120trademark", - "\u0120fertig", - "\u0120?!", - "\u00e6\u0139\u0142", - "\u0120reinforced", - "\u0120recharge", - "\u0120Putting", - "\u0120villains", - "\u0120handic", - "\u0120advertisement", - "\u00d8\u00aa\u00d9\u012c", - "\u0120\u00d1\u0123\u00d1\u0125\u00d0\u00bc", - "\u0120Riley", - "\u00d7\u0137\u00d7\u0133\u00d7", - "\u00e4\u00ba\u00ac", - "Os", - "\u00d8\u00a7\u00d8\u00b2", - "Boy", - "\u0120squish", - "ocket", - "\u0120testify", - "\u00e6\u00bc\u0136", - "\u0120\u00d7\u013e\u00d7\u0140\u00d7", - "\u0120\u00d0\u00bc\u00d0\u00b0\u00d1\u0123\u00d1\u0123", - "manuel", - "\u0120Arkansas", - "iffe", - "\u0120analysts", - "\u0120Deaf", - "\u0120j\u00c3\u00b3", - "\u0120groceries", - "\u0120Wheel", - "\u0120\u00d1\u0122\u00d0\u00b8\u00d1\u0123", - "\u0120c\u00c3\u00b2n", - "\u0120Cob", - "\u0120prisons", - "\u00c3\u00a8ve", - "\u0120Cabinet", - "\u0120posed", - "\u0120guerre", - "\u0120Lloyd", - "\u0120clerk", - "\u0120crises", - "\u0120Sho", - "\u0120Ore", - "\u0120Football", - "\u0120Advis", - "\u0120Zheng", - "\u00e8\u012f", - "\u0120AMY", - "\u0120unfor", - "\u0120monaster", - "\u0120compile", - "\u0120immortal", - "atable", - "\u0120parano", - "\u0120tiver", - "\u0120Steph", - "\u0120Fu\u00c3\u0141", - "\u0120discontin", - "\u0120ripe", - "\u0120hacking", - "\u0120siendo", - "\u0120seguro", - "altres", - "\u0120anderes", - "\u0120\u00eb\u00a6\u00ac\u00eb", - "\u0120exports", - "\u00e6\u0143\u00a5", - "\u0120tabii", - "\u0120\u00ea\u00b8\u00b0\u00eb\u012d\u00a4\u00eb", - "\u0120bothering", - "\u0120pickle", - "\u0120BRIAN", - "\u0120altar", - 
"\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00b1", - "\u0120transferring", - "\u0120Vors", - "\u0120\u00d9\u0129\u00d9\u012a", - "\u0120Za", - "\u0120Frances", - "\u0120browse", - "emit", - "\u0120chewing", - "\u0120Freddy", - "\u0120editors", - "\u00c3\u00a4lle", - "\u0120\u00ed\u012e\u0122", - "\u0120Sque", - "\u0120Cultural", - "awk", - "\u0120Sache", - "\u0120Carbon", - "\u00e1\u00ba\u00aft", - "FL", - "\u0120NGO", - "pe\u00c5\u0124", - "\u0120Sou", - "\u0120hvor", - "unintelligible", - "\u0120\u00eb\u00b2\u0137", - "\u0120\u00c2\u00b0", - "iin", - "\u0120\u00d7\u00a2\u00d7\u013f", - "\u0120derri\u00c3\u00a8re", - "\u0120czym", - "\u0120Apost", - "\u0120regarder", - "\u0120agrade", - "\u0120Candy", - "\u0120mare", - "\u0120introduces", - "birds", - "\u0120uniquely", - "\u0120muk", - "\u0120cooker", - "\u0120crews", - "\u0120jeito", - "ERT", - "\u00b6\u0126\u00eb", - "nisse", - "\u0120ef", - "\u0120carte", - "\u0120Yak", - "\u0120PAT", - "\u00d0\u00b8\u00d0\u00bd\u00d0\u00be", - "bokki", - "\u0120mates", - "\u0120distint", - "\u0120\u00ec\u00bd\u0136\u00eb\u00a1\u013e\u00eb\u0124\u013a", - "\u0120y\u00c4\u00b1l", - "\u0120\u00ce\u00ba\u00ce\u00ac\u00ce\u00bd", - "\u0120configurations", - "enga", - "recht", - "Happy", - "\u00e3\u0124\u0126\u00e3\u0123\u00a3\u00e3\u0123\u00a6", - "invest", - "\u0120reconstruct", - "\u0120\u00d1\u012f\u00d1\u0124\u00d0\u00be\u00d0\u00bc\u00d1\u0125", - "\u0120mosque", - "raum", - "\u0120voyez", - "\u0120NBC", - "\u0120\u00ec\u0140\u0132\u00ec\u012d\u0142", - "\u0120sturdy", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00bf", - "\u0120ansch", - "alid", - "\u0120masih", - "\u0120REP", - "\u0120\u00ec\u00bd\u0136\u00eb", - "\u0120deduct", - "\u0120salir", - "wurf", - "ilot", - "\u0120Mutter", - "olds", - "\u0120FEMA", - "\u0120Bib", - "\u0120neighboring", - "\u0120bliss", - "\u0120\u00ed\u013a\u00bc", - "\u00d0\u00bb\u00d0\u00b8\u00d1\u0123\u00d1\u012e", - "\u0120\u00d1\u0124\u00d1\u0122\u00d0\u00b5\u00d0\u00b1", - "\u0120\u00e5\u00b0\u00b1\u00e6\u013a\u00af", - "\u0120grenade", - "\u0120egal", - "\u0120finely", - "\u0120petals", - "\u0120keer", - "\u0120chyba", - "\u0120skipping", - "\u0120thirteen", - "\u0120gravy", - "\u0120SAT", - "61", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d0\u00b3", - "\u0120mins", - "ITE", - "\u0120sozial", - "\u00ed\u0137\u013a\u00eb\u00a9\u00b4\u00ec\u0126\u013e", - "ruktur", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00b7\u00d0\u00bc\u00d0\u00be\u00d0\u00b6", - "\u0120\u00d0\u00be\u00d0\u00bf\u00d1\u0131\u00d1\u0124\u00d1\u012e", - "\u0120arth", - "\u0120Cuban", - "\u0120treasures", - "\u0120fertilizer", - "\u0120awakening", - "\u0120\u00eb\u00b0\u00b1\u00ec\u012d\u0142", - "\u0120rall", - "\u0120depict", - "\u0120Pablo", - "\u0120nineteen", - "\u0120watt", - "\u0120entirety", - "KS", - "\u0120Woods", - "Sch", - "\u0120\u00da\u00a9\u00d9\u012a", - "\u0120Dry", - "\u00e3\u0123\u0140", - "uve", - "\u0120reconstruction", - "\u0120anatomy", - "\u012a\u00eb\u00a5\u00bc", - "\u0120baba", - "\u0120listener", - "\u0120sharpen", - "\u0120Peru", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d0\u00b7", - "\u0120recreation", - "\u0120initiate", - "\u0120calor", - "\u0120Naj", - "gee", - "\u0120Feels", - "\u0120Snapchat", - "\u0120Tet", - "\u0120Nest", - "\u0120Daf", - "\u0120Finish", - "\u0120\u00d1\u0124\u00d0\u00b0\u00d0\u00ba\u00d0\u00b8\u00d0\u00bc", - "\u00c3\u00bac", - "izens", - "\u0120spins", - "\u0120embry", - "\u0120passages", - "\u0120cient", - "\u0120justification", - "\u00e4\u00bb\u0138\u00e8\u00aa\u00aa", - "\u0120olmaz", - 
"\u0120flooded", - "\u0120emoji", - "\u0120embracing", - "\u0120discard", - "\u0120Basic", - "agog", - "\u0120\u00ec\u013e\u0126\u00ed\u0137\u00b4", - "\u0120asylum", - "erin", - "\u0120fim", - "\u0120ninja", - "\u0120automate", - "\u0120allergic", - "\u00c3\u00bf\u00c3\u00bf\u00c3\u00bf\u00c3\u00bf", - "amam", - "\u0120\u00d0\u00bc\u00d0\u00b0\u00d1\u0122", - "\u0120Oi", - "\u00c3\u00a4us", - "\u0120induct", - "\u0120BEN", - "\u0120z\u00c5\u0124", - "\u0120ka\u00c5\u00bcdy", - "\u0120AMP", - "n\u00c4\u013d", - "Sure", - "\u0120quil", - "\u0120espec", - "rok", - "BSCRI", - "\u0120liebe", - "pus", - "achsen", - "\u0120cricket", - "\u00eb\u012c\u0132", - "\u0120Frame", - "ekk\u00c3\u00bcr", - "arb", - "\u0120p\u00c5\u013b", - "\u00d0\u00b8\u00d1\u0123\u00d1\u0123", - "\u0120zeggen", - "\u0120doubles", - "\u0120Dre", - "test", - "insp", - "boys", - "\u0120m\u00c3\u00a3o", - "\u0120Verse", - "\u0120muscular", - "\u0120MALE", - "\u0120dulu", - "\u0120occasional", - "Lo", - "conomic", - "\u0120vak", - "\u0120remedy", - "\u00e5\u00a4\u0142", - "\u0120\u00e2\u013b\u00aa\u00e2\u013b\u00aa\u00e2\u013b\u00aa", - "vem", - "\u0120\u00c3\u00b6nem", - "\u0120kar\u00c5\u0141\u00c4\u00b1", - "\u0120Sharp", - "hur", - "\u0120\u00eb\u00b0\u00a9\u00eb\u00b2\u0137", - "\u0120grandson", - "\u0120aktiv", - "\u0120Thrones", - "\u0120\u00ec\u0137\u012a\u00ec\u0139\u0132", - "\u0120tots", - "\u0120subd", - "\u0120Paula", - "\u0120graves", - "\u0120Brent", - "\u0120\u00d0\u00bd\u00d0\u00b8\u00d0\u00ba\u00d1\u0124\u00d0\u00be", - "\u0120s\u00c3\u00b6z", - "\u0120crec", - "\u0120Vladimir", - "\u00e7\u0138\u00ab", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b9", - "\u0120\"-", - "\u0120psy", - "atri", - "idan", - "\u0120a\u00c3\u00ban", - "\u0120standardized", - "\u00ec\u00b9\u013a\u00eb", - "\u0120\u00d0\u00ba\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "\u0120Zhu", - "something", - "\u0120750", - "\u0120mujeres", - "\u0120ait", - "\u00e9\u0139\u00b4", - "agu", - "\u0120corrected", - "ikka", - "eled", - "\u0120Career", - "owym", - "\u0120roommate", - "\u0120descendants", - "\u0120Napoleon", - "\u0120\u00d0\u0136\u00d0\u00be", - "\u00ed\u0138\u012a\u00ec\u0138\u00b4\u00ec\u013c\u0136", - "\u0120bunun", - "\u0120Micha", - "\u00e7\u00b7\u013c", - "\u0120descob", - "PI", - "\u0120palabra", - "\u0120tracked", - "\u0120dependence", - "\u0120Barack", - "\u00e5\u0123\u0129", - "\u0120fertility", - "\u0120Southwest", - "\u0120incomplete", - "\u0120comunic", - "\u0120compris", - "\u0120Restaur", - "\u0120acron", - "\u00ce\u00ba\u00ce\u00b1", - "\u0120apprentices", - "\u0120musst", - "\u0120Abr", - "\u0120pentru", - "\u0120Consort", - "\u0120Avec", - "\u0120dumplings", - "LR", - "\u0120wszystkie", - "\u0120swamp", - "\u00d0\u00bd\u00d0\u00b5\u00d0\u00b2", - "uggle", - "\u0120watercolor", - "\u0120proton", - "\u0120Espa\u00c3\u00b1a", - "ocking", - "\u00d0\u00be\u00d0\u00b2\u00d0\u00b0\u00d0\u00bb", - "\u0120takim", - "Very", - "\u0120dementia", - "\u0120\u00c5\u0141eyi", - "Jac", - "\u0120MacBook", - "\u0120Liv", - "fficients", - "\u0120Hunt", - "\u0120overlay", - "\u00e6\u0126\u0141\u00e8\u00a6\u00ba", - "\u0120Skype", - "punkt", - "\u0120confined", - "\u0120Adrian", - "\u00d8\u00b1\u00d9\u0125", - "\u0120Jeep", - "\u0120enquanto", - "\u0120anest", - "\u00d0\u00be\u00d1\u0124\u00d0\u00b2\u00d0\u00b5\u00d1\u0124", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd\u00d1\u012e", - "\u0120irrigation", - "\u00e1\u00bb\u0133n", - "\u0120eighteen", - "\u0120Pon", - "\u0120rescued", - "\u01201983", - "r\u00c3\u00bc", - "jae", - "\u0120Jeong", - 
"\u0120amazingly", - "\u0120FDP", - "\u0120backstage", - "cue", - "\u0120\u00cf\u0125\u00cf\u0126\u00ce\u00b7\u00ce\u00bd", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00b5", - "\u0120livestock", - "\u0120Warner", - "\u0120majors", - "\u00e3\u0125\u0123\u00e3\u0125\u00a3", - "\u0120cooperative", - "\u0120Brady", - "rained", - "rieb", - "\u0120\u00d7\u0133\u00d7\u0140\u00d7", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00b2\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be", - "\u0120FE", - "\u0120leaked", - "\u0120Mercury", - "\u0120persuade", - "\u0120transformer", - "\u0120Norweg", - "\u0120\u00ec\u0139\u00ac\u00eb\u0141\u00ac", - "\u0120zrobi\u00c4\u0129", - "\u0120cardiovascular", - "\u0120Crash", - "\u0120gossip", - "\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d1\u012e", - "\u0120\u00ec\u00aa\u00bd", - "\u0120swept", - "\u0120Horn", - "\u0120At\u00c3\u00a9", - "\u0120bukan", - "\u0120Kaw", - "KY", - "\u0120Stories", - "Gary", - "\u0120gardening", - "\u0120Quickly", - "\u0120Falcon", - "\u0120ovat", - "c\u00c4\u00b1", - "\u0120Complet", - "\u0120Date", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bc", - "\u0120l\u00c3\u00a4uft", - "\u0120Audrey", - "\u0120Went", - "\u0120pel\u00c3\u0143cul", - "\u0120carriage", - "\u0120unacceptable", - "nymi", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d1\u012d\u00d1\u012a", - "\u0120terre", - "uellement", - "EEEE", - "\u0120pharmac", - "h\u00c3\u00b5es", - "\u0120zich", - "\u0120migrate", - "\u0120Fry", - "\u00c3\u00b1ana", - "\u0120Muito", - "EOVER", - "\u0120fortress", - "\u0120Compan", - "\u0120JSON", - "ordnung", - "\u0120warto", - "\u0120ungef", - "\u00ec\u0127\u0136\u00ec\u0126\u013e", - "\u0120\u00d1\u0122\u00d0\u00be\u00d0\u00ba", - "\u0120paddle", - "Jared", - "\u0120submitting", - "\u0120latch", - "\u0120fug", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d1\u0123", - "\u0120Ef", - "\u0120launches", - "\u0120ft", - "otechn", - "\u0120travelled", - "\u00d8\u00a7\u00d9\u0123", - "\u00e9\u0123\u0137", - "\u0120proch", - "\u0120dedim", - "83", - "\u0120rebound", - "\u0120LU", - "path", - "\u0120\u00d1\u0123\u00d0\u00bf\u00d1\u0122\u00d0\u00b0\u00d0\u00b2", - "\u0120\u00c3\u00b6l", - "\u0120\u00ed\u0124\u00a4", - "\u0120privat", - "\u0120tractor", - "\u0120Attention", - "Ser", - "\u0120coses", - "\u00c3\u00a1ria", - "pal", - "\u0120\u00ec\u013f\u0122", - "\u0120successor", - "\u0120connectors", - "\u0120\u00d1\u0125\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d0\u00bd\u00d0\u00be\u00d0\u00b2", - "\u0120genocide", - "\u0120sufficiently", - "\u0120Aix\u00c3\u00b2", - "\u0120stabilize", - "\u0120congest", - "\u0120carving", - "\u0120zost", - "\u0120\u00d0\u00b1\u00d1\u012d\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00be", - "\u0120shortest", - "\u0120livel", - "\u012089", - "\u00e9\u0123\u012c", - "\u0120erk", - "\u0120portraits", - "\u00e0\u00a5\u0122", - "\u00e8\u013a", - "boat", - "llah", - "ANC", - "\u0120empirical", - "\u0120Echo", - "\u0120Nederland", - "\u00e8\u00bf\u013b\u00e4\u00b9\u012a", - "Net", - "\u0120cuidado", - "\u0120Roma", - "\u0120calf", - "\u0120giants", - "\u0120Explorer", - "\u0120Collect", - "alition", - "\u0120Destiny", - "\u0120ausge", - "\u0120Edu", - "\u0120Clo", - "\u0120earrings", - "\u0120Track", - "\u0120ROS", - "\u0120Belle", - "\u00e7\u013b\u00be", - "\u0120pueda", - "\u0120daytime", - "\u0120supplier", - "\u0120SV", - "\u0120Exhale", - "\u0120galera", - "course", - "\u0120centimeter", - "\u0120Bast", - "mud", - "\u0120sangat", - "\u0120Physical", - "\u0120privately", - "\u0120trata", - "lynn", - "illi", - 
"\u0120\u00eb\u00a9\u0136\u00ec\u013f\u00b4\u00ed\u0123\u00ac\u00ec\u0139\u0127", - "\u0120crystall", - "\u0120pods", - "\u00e1\u00ba\u00a3n", - "inator", - "\u0120Records", - "\u00e5\u00ae\u013a", - "\u00c4\u0141imiz", - "issement", - "hare", - "hadow", - "\u0120DK", - "\u0120\u00ec\u0137\u012e\u00ea\u00b3\u0142", - "\u0120wyn", - "\u0120requesting", - "\u0120Donna", - "\u0120\u00ec\u0139\u00b4\u00ec\u012d\u00ac\u00ed\u0140\u012a", - "inea", - "\u0120exert", - "\u0120Duncan", - "\u0120\u00d0\u00b2\u00d0\u00b5\u00d1\u0129", - "\u0120Hah", - "\u00e0\u00a4\u0124", - "\u0120Lif", - "\u0120Finding", - "\u0120Nov", - "\u0120\u00d0\u00b7\u00d0\u00bd\u00d0\u00b0\u00d0\u00ba", - "\u0120\u00d0\u00be\u00d1\u0126", - "\u0120Qu\u00c3\u00a8", - "\u0120quarterback", - "\u0120\u00d1\u0126\u00d0\u00b0\u00d0\u00ba", - "\u0120bipartisan", - "\u00c4\u0141in", - "\u0120n\u00c3\u00a9cess", - "\u0120referendum", - "\u0120compiler", - "\u0120probabil", - "\u00d0\u00b5\u00d0\u00b4\u00d0\u00b8", - "\u0120trader", - "\u00e6\u013a\u0135", - "\u0120Rum", - "geme", - "\u0120dio", - "\u0120b\u00c4\u013bdziemy", - "\u0120\u00cf\u0122\u00ce\u00ac", - "\u00ea\u00be\u00b8", - "\u00d7\u0137\u00d7\u013a", - "\u0120\u00e0\u00a4\u0137", - "\u0120\u00d0\u00b1\u00d0\u00bb\u00d0\u00b0\u00d0\u00b3", - "\u0120scalp", - "\u0120Pause", - "\u0120caption", - "\u0120endanger", - "\u0120enlar", - "\u0120rotten", - "\u00e3\u0125\u0125\u00e3\u0125\u012a", - "\u0120wah", - "\u00e8\u0124\u012b", - "\u0120dzi", - "\u0120Install", - "Ay", - "\u0120crear", - "\u00d0\u00b5\u00d0\u00bd\u00d1\u0124\u00d0\u00b0", - "\u0120weighing", - "\u0120butterflies", - "\u0120Gast", - "\u00e4\u00ba\u0137", - "horn", - "warz", - "ICEOVER", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b9\u00d1\u0124\u00d0\u00b8", - "\u0120coefficients", - "\u00e7\u00b0\u00a1\u00e5\u0138\u00ae", - "\u0120Spencer", - "\u0120Higher", - "\u0120cowork", - "\u00e5\u00a8\u013a", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d0\u00b5", - "\u0120monit", - "\u0120dysfunction", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d0\u00bd\u00d0\u00be\u00d0\u00b2", - "\u0120tournaments", - "\u0120oyster", - "BN", - "\u0120trud", - "slow", - "\u0120Penny", - "\u0120Odys", - "\u00c3\u00a6r", - "\u0120fou", - "\u0120enjoyment", - "\u00d0\u00b0\u00d1\u0124\u00d1\u012d", - "\u0120wygl\u00c4\u0127da", - "\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00b0\u00d1\u0131", - "\u0120Protect", - "\u0120moy", - "\u0120claw", - "\u0120suspicion", - "\u0120sacrificed", - "\u0120gosto", - "Big", - "\u0120aggressively", - "\u0120vorne", - "\u00e3\u0125\u0142", - "\u0120blamed", - "\u0120Sehr", - "\u00d7\u00a4\u00d7\u00a8", - "cito", - "\u0120seals", - "\u0120mujer", - "\u0120Weird", - "\u0120forens", - "\u0120contributes", - "estra", - "\u0120pog", - "LOL", - "\u0120hacerlo", - "\u00d0\u00be\u00d1\u0124\u00d1\u012e", - "fiction", - "79", - "\u00ce\u00bb\u00ce\u00bf", - "\u00e5\u00a4\u00a7\u00e6\u00a6\u0124", - "\u00e5\u00a3\u00b0", - "\u0120\u00d1\u0124\u00d0\u00be\u00d0\u00b1", - "\u0120GS", - "\u0120Clara", - "itez", - "\u0120advocating", - "\u0120\u00ed\u0136\u0126\u00eb", - "sung", - "\u0120vertices", - "\u0120navigating", - "\u0120europ\u00c3\u00a9", - "\u00e7\u013c\u0128", - "\u0120slowed", - "\u0120foreground", - "\u0120Industrial", - "\u0120adore", - "\u00ec\u012d\u0143", - "\u0120cr\u00c3\u00a9er", - "\u00e6\u0140\u0139", - "chnitt", - "\u0120unaware", - "\u0120curly", - "entar", - "\u0120ler", - "\u0120prohibited", - "\u0120Heroes", - "\u0120Reed", - 
"uca", - "\u0120smok", - "\u0120kunna", - "zeitig", - "immen", - "\u0120Lun", - "\u0120\u00d0\u00b0\u00d0\u00b1\u00d1\u0123\u00d0\u00be\u00d0\u00bb\u00d1\u0130\u00d1\u0124", - "\u0120degli", - "\u0120villagers", - "\u0120preset", - "zept", - "uds", - "\u0120emit", - "\u00e4\u00bd\u0142\u00e8\u00a6\u0123", - "\u0120\u00eb\u012b", - "\u00eb\u012c\u0136\u00ec\u00a7\u0122", - "\u00d0\u00bd\u00d0\u00b0\u00d0\u00ba\u00d0\u00be", - "\u0120os\u00c3\u00b3b", - "\u01201969", - "\u0120\u00d0\u0132\u00d1\u0122", - "\u0120manchmal", - "\u0120Brock", - "\u0120mantra", - "\u0120WIL", - "bach", - "in\u00c3\u00a4", - "elas", - "keln", - "\u0120disciple", - "\u0120qualc", - "\u0120dehyd", - "\u00ec\u013f\u00b4\u00eb\u013f\u00bc\u00eb\u012c\u0136", - "Af", - "\u00ec\u0126\u00b1\u00ec\u013f\u00b4", - "Ryan", - "\u0120puppet", - "\u0120\u00d0\u00b4\u00d1\u0122\u00d1\u0125\u00d0\u00b3\u00d0\u00b8\u00d0\u00b5", - "\u0120rud", - "\u0120pending", - "Plus", - "\u0120\u00ec\u0137\u012c\u00ec\u013f\u0126", - "\u0120b\u00e1\u00bb\u012d", - "\u0120Sega", - "\u00c3\u00a7e", - "\u0120programmer", - "bli", - "\u0120unl", - "\u0120enslaved", - "\u0120soci\u00c3\u00a9t\u00c3\u00a9", - "\u00c4\u0123h", - "\u0120inheritance", - "\u0120Bangl", - "ermaid", - "\u0120practitioner", - "\u0120Stalin", - "\u0120User", - "cible", - "\u0120cardiac", - "\u0120Koreans", - "\u0120dumped", - "\u0120\u00d7\u0136\u00d7\u013b\u00d7\u0136", - "\u00c3\u00a1is", - "\u0120hydraulic", - "oubtedly", - "\u0120Pit", - "\u0120picnic", - "\u0120beh\u00c3\u00b6ver", - "\u0120\u00d1\u0123\u00d0\u00bc\u00d0\u00be\u00d0\u00b3", - "\u0120braking", - "\u00e9\u00bb\u0133", - "utar", - "\u0120\u00ec\u0126\u00b8\u00eb", - "ubl", - "\u0120\u00c3\u00bcz", - "\u0120majesty", - "\u0120bers", - "utable", - "\u0120hotter", - "\u00e7\u0127\u00a7", - "\u00db\u012e\u00d9\u0128", - "\u0120biases", - "\u0120subjected", - "\u0120naughty", - "\u0120circus", - "\u00e3\u0123\u0139\u00e3\u0123\u012d", - "\u0120Immedi", - "\u0120Stefan", - "\u0120Triple", - "enk", - "\u0120wit", - "\u0120recycle", - "emie", - "dated", - "\u0120unload", - "\u0120popula", - "chin", - "\u0120yields", - "\u0120english", - "\u0120Bonnie", - "\u0120spiders", - "\u00c3\u0123", - "\u0120erosion", - "\u00e9\u0125\u00a8\u00e5\u012a\u0128", - "\u0120NICK", - "\u00d0\u00b8\u00d1\u0131\u00d1\u0127", - "\u0120impart", - "\u0120\u00d0\u00ba\u00d0\u00bd\u00d0\u00b8", - "\u0120resolutions", - "\u0120lithium", - "\u0120convergence", - "\u0120Tara", - "\u0120\u00d0\u00b4\u00d0\u00b2\u00d0\u00b5", - "ths", - "\u0120Cindy", - "\u00e6\u012a\u0133\u00e8\u00a6\u0123", - "\u00e5\u00b9\u00ab", - "\u0120DIE", - "\u0120assurance", - "\u0120\u00d0\u00be\u00d0\u00bf\u00d0\u00b8\u00d1\u0123", - "\u0120buckets", - "\u0120cues", - "\u0120Quiet", - "\u0120similarity", - "\u0120foundational", - "\u0120Minist", - "\u00e6\u00bb\u00bf", - "\u0120pian", - "\u0120centr", - "\u0120numb", - "\u0120monks", - "ujourd", - "enzie", - "\u0120skateboard", - "\u0120dlatego", - "\u0120\u00d1\u0123\u00d0\u00be\u00d1\u0124", - "\u0120AE", - "\u0120masterpiece", - "\u0120Solomon", - "\u0120Reddit", - "\u0120riot", - "abl", - "\u0120Jazz", - "\u0120electromagnetic", - "\u0120insecure", - "\u0120Compet", - "geries", - "\u00d0\u00be\u00d0\u00b1\u00d0\u00be\u00d0\u00b4", - "\u0142\u00d7\u0137", - "\u00f0\u0141\u0134", - "\u0120senators", - "\u0120Brisbane", - "\u0120Alb", - "uttering", - "\u0120Allow", - "zero", - "\u0120pai", - "\u0120\u00d0\u0132\u00d0\u00bb\u00d0\u00b5\u00d0\u00ba\u00d1\u0123", - "\u0120Display", - "\u0120Blade", - "\u0120Apps", 
- "\u0120p\u00c3\u00a4", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d1\u0123\u00d1\u0131", - "\u0120quella", - "\u0120Gao", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00bd\u00d1\u012d\u00d1\u0127", - "\u0120spoilers", - "\u0120gallons", - "\u0120\u00d9\u0126\u00d9\u012c", - "\u0120Zion", - "\u00e6\u013e\u012b\u00e4\u00b8\u0122", - "onie", - "ragt", - "\u0120Chand", - "\u0120\u00eb\u00b3\u0133", - "\u0120blunt", - "\u0120usu", - "\u0120Kad", - "rakt", - "\u0120cinematic", - "\u0120ammunition", - "rene", - "\u0120fourteen", - "\u0120Carn", - "crit", - "\u0120tenure", - "vu", - "\u0120principalmente", - "\u0120alleen", - "\u00e9\u0122\u013b\u00e4\u00b8\u0122", - "\u0120komplett", - "\u0120d\u00c3\u00bcny", - "James", - "\u0120receptor", - "\u0120oneself", - "guru", - "\u0120merchant", - "liness", - "\u0120overlooked", - "\u0120harmonic", - "\u00e9\u0137\u00bf", - "ieso", - "\u00d7\u0137\u00d7\u0140", - "colm", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b5\u00d0\u00ba\u00d1\u0124", - "\u0120Ada", - "\u00d8\u00a7\u00d8\u00b3", - "Tim", - "\u0120recurring", - "\u0120proceeds", - "\u0120Particularly", - "\u0120Download", - "etrical", - "\u0120matrices", - "\u0120proyecto", - "ancies", - "\u0120Uhm", - "\u0120caves", - "\u0120\u00ec\u0138\u00b4\u00eb\u0142\u00a4", - "\u0120Leaf", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u012d\u00d1\u0129", - "\u0120\u00ec\u013f\u00b4\u00ec\u013e\u0142", - "Europe", - "\u0120t\u00c4\u0127", - "\u0120puls", - "\u0120takiego", - "\u00d0\u013f\u00d0\u00b5", - "GU", - "\u0120fors", - "\u00cf\u0123\u00ce\u00b3", - "\u0120fotos", - "\u0120))", - "\u0120\u00eb\u00a9\u00a4\u00eb", - "\u0120aquilo", - "\u0120Kurd", - "\u00ef\u00b8\u0131", - "ptic", - "\u0120Dort", - "\u0120misery", - "auso", - "\u00e5\u012c\u0141", - "chuckling", - "\u0120Ridge", - "\u0120\u00ed\u0138\u012a\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120***", - "\u00e5\u00ae\u00a2", - "\u0120Hmmm", - "\u0120geographic", - "\u0120anys", - "\u0120talvez", - "\u0120skelet", - "\u0120signatures", - "\u0120liters", - "\u0132\u00eb\u00a9\u00b4", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d0\u00be\u00d0\u00b5\u00d0\u00b3\u00d0\u00be", - "\u0120skiing", - "\u0120\u00d0\u013e\u00d0\u00be\u00d1\u0123", - "\u0120adopting", - "\u0120haft", - "\u0120symmetric", - "\u0120Liqu", - "\u0120thyroid", - "\u0120misin", - "lude", - "\u0120hull", - "\u0120XD", - "\u0120Gust", - "zeich", - "\u0120vibrations", - "\u0120esemp", - "\u0120\u00d0\u00b2\u00d1\u0123\u00d1\u0130", - "\u0120Quem", - "\u0120\u00c3\u00bcbrig", - "\u0120Ske", - "\u0120Lynch", - "rooms", - "artet", - "fest", - "\u0120fr\u00c3\u00bcher", - "\u0120lure", - "\u00e4\u00b8\u012f\u00e5\u00a5\u00bd\u00e6\u0126\u0131\u00e6\u0122\u013f", - "\u0120\u00ec\u0137\u012e\u00ec\u0137\u0126", - "\u0120WIN", - "\u0120RYAN", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d1\u0125\u00d1\u0130", - "\u0120Kash", - "\u0120\u00d7\u0136\u00d7\u0140", - "\u0120safeg", - "\u0120Hallelujah", - "\u0120\u00d0\u00b4\u00d0\u00b2\u00d1\u0125\u00d1\u0127", - "\u0120staple", - "\u0120sediment", - "\u0120Acts", - "\u0120blaming", - "\u0120mainland", - "\u0120sporting", - "\u0120decorations", - "\u0120executing", - "\u0120paran", - "\u0120Dollar", - "\u0120projections", - "\u0120commissioned", - "\u0120bour", - "\u00c3\u00b6m", - "\u0120steamed", - "\u0120\u00eb\u0143\u013a", - "\u0120petrol", - "\u0120celular", - "\u00e5\u00b8\u00b6", - "\u0120Hungary", - "\u0120rented", - "\u0120\u00d0\u00b2\u00d0\u00b0\u00d1\u0122\u00d0\u00b8", - "bbie", - 
"\u0120s\u00c3\u00a9cur", - "\u00c3\u00bcll", - "\u0120swings", - "between", - "\u0120\u00d0\u00b8\u00d1\u0124", - "estro", - "\u0120niemand", - "\u0120\u00ec\u0124\u00bc", - "\u0120Pardon", - "esses", - "\u0120MID", - "\u0120centralized", - "\u0120Alien", - "culos", - "\u0120crise", - "\u00e8\u00a3\u00a1\u00e9\u013f\u00a2", - "\u0120classe", - "beitet", - "i\u00c4\u0141i", - "\u0120whales", - "\u0120perimeter", - "\u0120tying", - "\u0120strony", - "\u0120likewise", - "\u0120Punch", - "Da", - "\u0120Baptist", - "\u0120sorting", - "\u0120iv", - "\u0120\u00ed\u0137\u00a9", - "\u0120rehab", - "\u0120eta", - "river", - "\u0120sai", - "\u00e3\u0123\u0126\u00e3\u0123\u0141\u00e3\u0123\u0142", - "odus", - "\u00e3\u0123\u012c\u00e9\u00a1\u013a\u00e3\u0123\u0126\u00e3\u0123\u0139\u00e3\u0123\u00be\u00e3\u0123\u013b", - "\u0120essayer", - "\u0120turtles", - "\u0120Hazrat", - "\u0120fabrics", - "\u0120cavity", - "\u0120poniewa\u00c5\u00bc", - "\u0120schlecht", - "\u0120salsa", - "\u00c5\u0141ekk\u00c3\u00bcr", - "\u0120seating", - "\u0120economists", - "\u0120mang", - "\u0120seguinte", - "\u0120rang", - "\u0120ratios", - "\u0120constell", - "\u0120longtemps", - "uating", - "\u0120spoiled", - "\u0120recipients", - "\u0120sniper", - "\u00e4\u00b9\u012d\u00e5\u012b\u012f", - "\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00ea\u00b9\u012e", - "\u0120wp", - "\u0120LINKE", - "\u0120flare", - "\u0120Adri", - "\u00c3\u00b1as", - "\u0120backl", - "m\u00c3\u00a4\u00c3\u0141", - "\u0120Bend", - "\u0120workloads", - "\u0120\u00d1\u0123\u00d1\u0125\u00d0\u00bf", - "\u01201975", - "\u00d0\u00b8\u00d0\u00bc\u00d1\u0123\u00d1\u0131", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00b5", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00bd", - "\u0120aspirations", - "\u0120Aer", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d0\u00b2\u00d0\u00be\u00d1\u0122\u00d0\u00b8\u00d1\u0124\u00d1\u012e", - "\u0120Qian", - "\u00e5\u00a6\u012a", - "\u0120compromised", - "\u0120yolk", - "\u00d0\u00bb\u00d0\u00b0\u00d1\u0123\u00d1\u0124", - "\u0120hemen", - "rove", - "dens", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bc\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd\u00d1\u0124", - "\u0120---", - "\u0120fluores", - "\u00d0\u00bd\u00d0\u00be\u00d1\u0123", - "\u0120Liverpool", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b1\u00d0\u00be\u00d0\u00b9", - "\u0120Zwe", - "\u0120lumin", - "\u0120OG", - "\u00e1\u00b8", - "holm", - "profits", - "SN", - "\u0120proportions", - "\u0120mica", - "\u0120Boh", - "\u0120Atlas", - "\u0120unsure", - "\u0120touring", - "\u0120nied", - "\u0120t\u00c4\u013b", - "\u0120imperative", - "\u0120demek", - "\u0120Sheriff", - "rance", - "\u0120homeland", - "\u0120Hail", - "\u0120Ganz", - "ymm", - "Mon", - "\u00e5\u0128\u00b7", - "vida", - "\u0120desarroll", - "\u00e6\u012c\u0122", - "\u0120intriguing", - "\u0120Hugo", - "\u0120\u00e3\u0124\u0124", - "\u00e9\u00ac", - "\u00d0\u00b0\u00d1\u0128", - "\u0120Wi\u00c4\u013bc", - "atted", - "\u0120\u00ec\u0137\u0126\u00eb\u012d\u012a\u00ea\u00b3\u0142", - "\u0120Vari", - "\u00c3\u00a1d", - "\u0120surreal", - "\u0120disparities", - "\u0120m\u00c3\u00b3", - "ullen", - "\u0120\u00ec\u0140\u012a\u00eb\u012d\u00a4\u00ea\u00b3\u0142", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b6\u00d0\u00b0\u00d0\u00bb\u00d1\u0125\u00d0\u00b9\u00d1\u0123\u00d1\u0124\u00d0\u00b0", - "\u0120mains", - "\u0120eject", - "\u0120methane", - "\u0120marginalized", - "\u0120chilli", - "r\u00c3\u00a8s", - "\u0120yem", - "\u00e4\u00bd\u0142\u00e6\u013a\u00af", - "\u0120Chun", - "\u0120debts", - "\u0120downloading", - "\u0120Athens", - "isierung", - 
"ryn", - "\u0120tekn", - "\u0120Quindi", - "\u00e9\u013e\u0122", - "\u0120taraf", - "\u0120h\u00c3\u00a9", - "\u0120consciously", - "\u0120fixes", - "uckle", - "may\u00c4\u00b1n", - "\u0120frei", - "\u0120spa", - "\u0120\u00ec\u00a7\u0126\u00ed\u0138\u012b", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00b0", - "\u0120\u00d1\u0125\u00d0\u00ba", - "lett", - "\u0120olmu\u00c5\u0141", - "\u0120cheesy", - "\u00e0\u00b8\u00b2\u00e0\u00b8\u0123", - "naire", - "\u0120widen", - "\u0120lien", - "\u0120escaping", - "iggs", - "\u0120Blick", - "c\u00c4\u0127", - "\u0120\u00ec\u0126\u013e\u00eb", - "\u0120\u00d7\u0136\u00d7\u00a1", - "\u0120\u00d0\u00b2\u00d0\u00bf\u00d0\u00b5\u00d1\u0122", - "ophone", - "iell", - "\u0120SUBSCRI", - "\u0120lions", - "\u0120\u00ea\u00b7\u00b8\u00ea\u00b2\u0125", - "\u0120inspires", - "\u0120guarantees", - "\u0120come\u00c3\u00a7a", - "\u0120Growing", - "\u0120neglig", - "\u0120Frankf", - "\u0120gegeben", - "\u0120\u00c4\u0133\u00e1\u00ba\u00a7u", - "\u0120endlich", - "\u0120\u00ec\u012f\u00a8", - "\u0120TT", - "\u0120Lith", - "\u00cf\u0122\u00ce\u00b1", - "astern", - "\u0120Azer", - "\u0120lunar", - "hic", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0122\u00d0\u00be\u00d0\u00b4", - "\u0120nenhum", - "\u00e8\u00b7\u0133", - "\u0120Salvador", - "\u0120Progress", - "\u0120privileges", - "\u0120\u00eb\u0131\u013b\u00ec\u0137\u012a", - "\u0120antagon", - "\u0120Impf", - "\u0120descub", - "\u0120Lei", - "\u0120\u00ec\u0125\u012a\u00eb\u00a1\u013e", - "\u00d1\u0129\u00d0\u00b5", - "\u0120d\u00c3\u00b3lares", - "\u0120Meghan", - "\u0120Wire", - "too", - "aying", - "usc", - "\u0120tud", - "\u0120appeals", - "educ", - "\u0120pane", - "\u0120ji", - "\u0120decks", - "\u0120Alter", - "\u0120\u00e5\u00b0\u00b1", - "\u00ec\u0126\u00a4", - "\u00e5\u012a\u0128\u00e9\u0132\u013a", - "\u0120productions", - "\u0120WILLIAM", - "\u0120implied", - "\u0120fulfillment", - "\u0120Aah", - "\u0120saja", - "xus", - "\u0120\u00ce\u013c\u00ce\u00b1\u00ce\u00b9", - "\u00c3\u0142s", - "ucch", - "\u00d0\u00be\u00d0\u00ba\u00d0\u00be", - "\u0120Discord", - "\u0120SY", - "jsk", - "\u0120Wallace", - "unction", - "Daniel", - "\u0120k\u00c3\u00b6t", - "ijah", - "\u0120marche", - "\u0120disgr", - "\u0120mungkin", - "\u0120alma", - "\u00b3\u00b5", - "\u0120extensively", - "\u0120Floren", - "\u0120Allison", - "\u00e3\u0124\u00b1", - "\u00d9\u012c\u00d9\u0127", - "\u0120juven", - "\u0120Renaissance", - "\u0120fundraising", - "\u0120Chaos", - "\u0120paraly", - "\u0120narrator", - "\u0120ecosystems", - "Ash", - "\u0120mitigation", - "\u0120Aujourd", - "\u0120Idee", - "!,", - "\u0120\u00c2\u00bd", - "\u0120landlord", - "\u0120defects", - "\u0120acre", - "ulsive", - "\u0120algae", - "pek", - "\u0120emba", - "\u0120Roc", - "\u00e9\u013d\u00a2", - "ksom", - "\u00c3\u00a4che", - "\u0120leuk", - "\u0120leveraging", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0142\u0129\u00ec\u00a7\u0122", - "\u0120Palm", - "\u0120\u00c3\u00a4ven", - "\u0120lis", - "\u0120Insp", - "\u0120Rita", - "\u0120Abb", - "ithm", - "\u0120supervision", - "\u0120revisit", - "\u0120pi\u00c4\u013b", - "\u0120euh", - "\u0120fades", - "\u0120motto", - "\u00e5\u012f\u00a1", - "\u00d0\u00b5\u00d0\u00b7\u00d0\u00b6", - "\u0120Shim", - "\u0120relevance", - "\u0120oo", - "\u0120ostat", - "nica", - "\u0120choix", - "\u0120Faculty", - "\u0120\u00ec\u00a4\u0133\u00ec\u0139\u0132", - "\u0120Above", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00b1\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d1\u012a", - "\u0120sequencing", - "\u0120nutrient", - "\u0120conquered", - "\u0120digestive", - 
"\u0120backdrop", - "\u0120Lori", - "ailable", - "Game", - "\u0120neglected", - "omorph", - "illah", - "\u0120kne", - "\u0120siit\u00c3\u00a4", - "\u0120workspace", - "\u0120Venice", - "\u0120Kne", - "\u00d1\u012b\u00d0\u00be", - "\u0127\u0122", - "\u0120Hass", - "\u0120vita", - "\u013f\u00bc\u00eb\u00a9\u00b4", - "\u0120lays", - "\u00c3\u00aancias", - "\u00c3\u00a9rica", - "\u0120Ll", - "\u00e6\u00b1\u0124", - "\u0120Coca", - "\u0120WHY", - "\u00e8\u012a\u0140", - "\u0120routing", - "\u0120permissions", - "\u0120dings", - "prend", - "program", - "\u0120crocod", - "bral", - "AAAAAAAA", - "agit", - "\u0120N\u00c3\u00a4", - "\u0120gekommen", - "atten", - "\u0120referenced", - "\u0120pairing", - "\u0120Partner", - "\u0120Coronavirus", - "\u00d1\u0138\u00d1\u0123", - "\u00e8\u00bd\u012b", - "\u0120\u00d7\u0136\u00d7\u0135", - "\u0120espec\u00c3\u0143fic", - "arsi", - "quelle", - "\u0120spontaneous", - "\u00e7\u0128\u00b1", - "\u0120\u00ea\u00b2\u0125\u00ec\u013f\u0126", - "\u0120\u00d0\u0141\u00d0\u00be\u00d1\u0123\u00d0\u00bb\u00d0\u00b5", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00af", - "\u0120Shout", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00bb", - "\u0120disguise", - "\u0120Jord", - "\u0120wee", - "\u0120miejsc", - "\u0120serum", - "\u0120plaisir", - "\u0120credible", - "\u0120b\u00c3\u00a5", - "\u0120AJ", - "mares", - "\u0120rods", - "\u0120eran", - "\u00e3\u0123\u00be\u00e3\u0123\u0124", - "\u0120p\u00c3\u00a4\u00c3\u00a4", - "\u0120UA", - "\u0120Unknown", - "\u0120\u00d9\u0126\u00d9\u0127", - "\u0120Rabbi", - "\u0120laat", - "\u0120hairstyle", - "\u0120\u00d8\u00ba", - "\u00e9\u0123\u012d", - "\u0120cach", - "\u0120Writing", - "\u00d0\u00be\u00d1\u0129\u00d0\u00ba\u00d0\u00b8", - "abad", - "\u0120straighten", - "--\"", - "wife", - "\u0120hottest", - "\u0120punya", - "\u0120Fashion", - "griff", - "\u0120QR", - "otch", - "\u0120\u00d0\u013e\u00d0\u00be\u00d0\u00b6\u00d0\u00b5\u00d1\u0124", - "Cloud", - "\u0120Strike", - "\u0120Hein", - "\u0120\u00e7\u013e\u0141\u00e7\u013c\u0126", - "\u0120lei", - "\u0120Flow", - "wegs", - "\u0120habr", - "\u00e5\u012b\u013d\u00e5\u012b\u013d", - "nahme", - "\u00cc\u0123", - "\u0120pleasing", - "opping", - "\u0120\u00ea\u00b5\u00ac\u00eb\u0131\u0127", - "\u0120dran", - "\u0120bangs", - "\u012079", - "\u0120sket", - "\u0120caval", - "\u0120Macron", - "\u0120weighted", - "\u0120muted", - "\u0120nuestras", - "EEP", - "\u0120mathematic", - "\u0120MRI", - "agus", - "\u0120therapies", - "\u00ce\u00b8\u00ce\u00b5", - "\u0120unpl", - "\u0120commencer", - "full", - "\u0120towels", - "\u0120prue", - "\u0120licenses", - "\u00d7\u013d\u00d7\u0137\u00d7\u013e", - "\u0120\u00d0\u0141\u00d0\u00be\u00d1\u0129\u00d0\u00b5\u00d0\u00bc\u00d1\u0125", - "\u0120pointless", - "Bye", - "\u0120eligibility", - "\u0120scrape", - "\u0120abusive", - "\u0120Mant", - "\u0120jeunes", - "tal", - "\u0120Princip", - "\u0120Orthodox", - "\u0120melod", - "\u0120\u00d0\u00bc\u00d0\u00b0\u00d1\u0124\u00d0\u00b5\u00d1\u0122\u00d0\u00b8", - "\u0120prosecutor", - "\u0120opioid", - "\u0120\u00d1\u0125\u00d0\u00b2\u00d0\u00b5\u00d1\u0122", - "\u0120Been", - "\u0120\u00ec\u0142\u0133\u00ec\u00a2\u0127", - "\u0120dynasty", - "\u0120ajuda", - "\u0120entreg", - "\u0120weighed", - "\u0120eure", - "\u0120Bem", - "\u0120abnormal", - "82", - "\u0120JR", - "\u0120Akt", - "\u0120Bri", - "\u00c3\u00bat", - "\u0120stagn", - "!*", - "\u0120wegen", - "\u0120leaking", - "\u0120Words", - "\u0120Mau", - "\u0120vue", - "\u0120Liam", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b5\u00d0\u00bc", - 
"\u0120clinicians", - "\u0120Pump", - "\u0120f\u00c3\u00b6rst", - "?...", - "\u0120automotive", - "\u0120Owen", - "zusagen", - "\u0120Hundred", - "\u0120decentralized", - "\u0120bulbs", - "\u0120\u00d7\u013e\u00d7\u013d", - "\u0120provinces", - "\u0120Milan", - "81", - "kas", - "\u0120\u00eb\u0135\u00a3", - "\u0120for\u00c3\u00a7a", - "\u0120rightly", - "\u00e5\u00b3\u00b6", - "r\u00c4\u0127", - "\u0120venues", - "\u0120wai", - "\u0120predicting", - "\u0120WiFi", - "\u0120\u00ea\u00b6\u0123\u00ea\u00b8\u012a", - "\u00d8\u00b1\u00d9\u012a", - "\u0120\u00d7\u0136\u00d7\u0138", - "century", - "\u0120gradual", - "\u0120Probleme", - "\u0120\u00ec\u0139\u0127", - "\u0120coping", - "\u0120Brus", - "\u0120peanuts", - "irtschaft", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00bb", - "\u0120Troy", - "\u0120sperm", - "\u0120Mitar", - "\u0120T\u00c3\u00bcrkiye", - "grand", - "\u00a6\u0143", - "\u0120\u00d7\u0140\u00d7\u00a1", - "\u0120pans", - "\u0120Knowledge", - "berly", - "\u0120\u00d0\u0137\u00d0\u00b3\u00d0\u00be", - "\u0120danced", - "\u0120Frost", - "\u0120Burg", - "\u0120biting", - "\u00ec\u0142\u0137\u00ec\u013f\u0126", - "meal", - "\u0120heroic", - "\u0120motherboard", - "\u0120Licht", - "\u00e3\u0123\u00a3\u00e3\u0123", - "llan", - "\u00d0\u00b0\u00d0\u00b9\u00d0\u00bd", - "\u0120\u00d1\u0122\u00d1\u0131\u00d0\u00b4", - "\u0120\u00e0\u00b9\u0122\u00e0\u00b8", - "onen", - "irie", - "Art", - "rang", - "\u00ce\u00bd\u00ce\u00b7", - "\u0120newborn", - "\u0120amis", - "\u0120\u00d8\u00a7\u00d9\u012a\u00d8\u00b1", - "\u0120sophom", - "\u0120Careful", - "\u0120prospects", - "ensen", - "\u0120thrill", - "\u0120Vi\u00e1\u00bb\u0129t", - "Adam", - "rition", - "entric", - "uden", - "\u0120certificates", - "\u0120ashes", - "\u00e8\u00aa\u00bf", - "playing", - "\u0120sadece", - "\u0120ost", - "\u0120airplanes", - "\u00d1\u0122\u00d0\u00be\u00d0\u00ba", - "oner", - "\u0120magnesium", - "\u0120goddamn", - "\u01201972", - "\u0120Schule", - "\u0120temat", - "\u0120partout", - "\u00e0\u00af\u0124", - "\u0120inve", - "\u0120Scientists", - "\u0120Hudson", - "winning", - "ceksin", - "\u0120congressional", - "oru", - "\u0120ropes", - "\u00d0\u00b2\u00d0\u00b5\u00d0\u00b4", - "\u0120madre", - "\u0120ferry", - "\u0120Cohen", - "\u0120Pred", - "\u0120vagy", - "\u0120\u00d0\u00b1\u00d0\u00b5\u00d1\u0123\u00d0\u00bf", - "\u0120multim", - "\u0120drainage", - "\u0120simulator", - "giggles", - "\u0120Stadium", - "\u00d0\u00be\u00d0\u00b1\u00d1\u012b", - "\u0120notices", - "\u0120crawling", - "\u0120groupe", - "\u00e5\u0131\u00b8", - "\u0120kto\u00c5\u013d", - "\u0120Yoga", - "\u0120medida", - "\u0120\u00d1\u0127\u00d0\u00b2\u00d0\u00b0\u00d1\u0124", - "\u0120Lite", - "\u0120rav", - "orama", - "\u0120discord", - "\u0120DIRE", - "\u0120teh", - "\u0120Nurs", - "\u00e7\u00b2\u012b", - "\u0120pitched", - "\u0120barking", - "\u0120Coke", - "wiad", - "\u0120populated", - "\u00e9\u013b\u00a4", - "pelled", - "\u0120\u00d0\u00b1\u00d0\u00be\u00d0\u00b3", - "\u0120pewno", - "\u0120Cube", - "\u0120recruited", - "\u00e9\u0122\u013b\u00e7\u00a8\u00ae", - "\u0120Cara", - "\u00c4\u00b1\u00c4\u0141\u00c4\u00b1n\u00c4\u00b1", - "imated", - "\u0120\u00d1\u012a\u00d0\u00ba\u00d0\u00be\u00d0\u00bb", - "icional", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u0126", - "\u0120contamination", - "\u0120\u00c3\u00baltimos", - "\u0120fearful", - "\u0120elephants", - "usi", - "\u0120iTunes", - "\u0120Swami", - "\u00ea\u00bc", - "\u0120\u00ec\u0126\u00a4\u00eb\u00aa\u0127", - "\u0120Richards", - "\u0120magnets", - "\u0120Richtung", - 
"\u0120Legion", - "\u00e8\u0131\u013e", - "\u0120kitty", - "\u0120kissed", - "\u0120watering", - "\u0120cono", - "\u0120Palestine", - "idir", - "\u0120maze", - "\u0120fluids", - "\u0120Producer", - "\u0120Krsna", - "\u00e5\u00a5\u00bd\u00e5\u0137\u00a6", - "laf", - "\u0120\u00d7\u0132\u00d7\u0137", - "\u0120miesz", - "\u0120Xing", - "ointed", - "sein", - "\u0120Fuk", - "\u0120Depression", - "\u0120Duty", - "\u0120Panther", - "\u0120sund", - "\u0120refere", - "\u0120exclusion", - "\u0120naval", - "\u0120Winston", - "\u0120slogan", - "\u0120hypothetical", - "\u0120elevate", - "\u00eb\u0142\u00b9", - "\u0120cabe\u00c3\u00a7a", - "\u0120Gesund", - "meter", - "\u0120\u00ec\u0137\u0126\u00eb\u012d\u012a\u00eb\u00a9\u00b4", - "\u0120cloudy", - "\u00e2\u0122\u00a6?", - "\u0120Schritt", - "\u0120JS", - "\u00ec\u012f", - "\u0120Springs", - "\u0120Batter", - "\u00b7\u00b0", - "\u0120tailor", - "\u0120PTSD", - "\u0120Gent", - "\u0120ba\u00c4\u0141", - "\u0120spatula", - "\u0120cray", - "\u0120Legisl", - "\u0120s\u00c3\u00ba", - "\u0120leve", - "\u00e0\u00b8\u00b2\u00e0\u00b8\u00a1", - "\u0120erad", - "\u0120dong", - "\u0120derm", - "\u0120Banks", - "icho", - "\u00e5\u0127\u012a\u00e7\u0136\u0141", - "\u0120Franz", - "ravel", - "\u00e9\u0123\u0136", - "\u00d0\u00be\u00d0\u00bb\u00d0\u00be", - "\u0120flute", - "\u0120Ek", - "\u0120joyful", - "\u0120chased", - "\u0120Large", - "Over", - "\u0120entrepreneurial", - "\u0120considers", - "\u00d1\u0125\u00d0\u00b5\u00d0\u00bc", - "opa", - "\u0120dormir", - "\u0120Elementary", - "\u0120przypad", - "\u00d1\u0125\u00d1\u0123\u00d0\u00ba\u00d0\u00b0", - "\u0120\u00d0\u00be\u00d1\u0129\u00d0\u00b5\u00d1\u0122", - "ugene", - "\u0120tenido", - "\u0120lugares", - "\u00eb\u00a5", - "\u0120\u00d1\u0129\u00d0\u00b0\u00d1\u0123\u00d1\u0124", - "\u0120sao", - "\u0120braid", - "\u0120Vere", - "\u0120Reich", - "\u0120Poss", - "\u0120inan", - "wand", - "ref", - "\u0120montrer", - "\u01201981", - "\u00e7\u0137\u00aa", - "as\u00c4\u00b1nda", - "\u0120chrome", - "\u0120Trinity", - "\u0120exploitation", - "\u0120Sense", - "\u0120CMS", - "\u0120Noble", - "\u0120\u00ec\u0126\u0142\u00ed\u0125\u013f", - "\u0120swelling", - "electronic", - "]?", - "\u0120brushing", - "\u0120liquidity", - "\u0120Hook", - "\u0120Connor", - "\u0120Alum", - "\u0120gucken", - "suite", - "\u0120wiele", - "\u0120barrels", - "\u0120Regel", - "\u0120Ment", - "\u0120Trip", - "\u0120Brush", - "\u0120Erik", - "urate", - "\u00c9\u013br", - "\u0120Cyr", - "ouble", - "\u0120Becca", - "\u0120passwords", - "\u00c5\u00b1", - "borg", - "\u0120vendo", - "\u0120Claus", - "\u0120Faz", - "indest", - "\u0120deceased", - "\u0120comparisons", - "\u0120LCD", - "\u0120Pork", - "\u0120eventual", - "\u0120patreon", - "\u0120inability", - "\u0120extinction", - "\u0120\u00ec\u00a2\u012d\u00ec\u0137\u0126\u00ed\u0137\u013a\u00eb\u012c\u0136", - "\u0120\u00d1\u0123\u00d0\u00be\u00d1\u0123", - "aju", - "\u0120\u00d7\u0133\u00d7\u0132\u00d7", - "\u0120sofort", - "\u0120destined", - "\u0120Rin", - "\u0120mouths", - "\u0120Nat\u00c3\u00bcrlich", - "\u0120preserving", - "\u0120limp", - "\u00e9\u00bb\u00a8", - "ocused", - "\u00d0\u00b8\u00d0\u00bd\u00d0\u00b3", - "\u0120exposing", - "\u0120\u00ce\u00be", - "\u00eb\u012f", - "laugh", - "\u0120hiss", - "\u00e3\u0123\u0142\u00e3\u0123\u012d\u00e3\u0124\u012b", - "\u0120indie", - "\u0120detal", - "\u00d1\u0122\u00d0\u00b0\u00d0\u00b2\u00d1\u0123\u00d1\u0124\u00d0\u00b2", - "\u0120tr\u00c3\u00aan", - "\u00e6\u0137\u00b0", - "\u0120ogni", - "\u0120simplemente", - "\u01201978", - "\u0120goo", - 
"\u01201967", - "\u0120genug", - "h\u00c3\u00b6", - "\u0120hist\u00c3\u00b3", - "\u00e5\u00ae\u0141", - "\u0120lobster", - "cendo", - "\u0120teil", - "\u0120allevi", - "0000", - "OLD", - "\u0120pesos", - "\u0120bonuses", - "\u0120ami", - "\u0120revival", - "\u0120Horse", - "\u0120sack", - "Talk", - "\u0120mulher", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00be\u00d1\u0131\u00d0\u00bd", - "\u0120Hood", - "Huh", - "\u0120\u00eb\u00b6\u0123", - "\u0120hyung", - "\u0120Meeting", - "\u0120importa", - "\u0120\u00ec\u00b0\u00be\u00ec\u0137\u0126", - "\u0120Vern", - "\u0120stripped", - "\u0120refuses", - "\u0120qualifications", - "opl", - "\u0122\u00eb\u0131\u0126", - "ix\u00c3\u0143", - "\u0120diab", - "itime", - "flows", - "\u0120inac", - "\u0120Gong", - "\u0120meaningless", - "\u0120courageous", - "\u0120microbi", - "azy", - "hist", - "\u0120volunteering", - "VIE", - "\u0120violated", - "\u0120sympathy", - "\u0120Edit", - "\u00e5\u00a5\u00bd\u00e5\u0125\u0131", - "electric", - "product", - "\u0120pandemia", - "\u0120geometric", - "\u0120Convers", - "gre", - "\u0120glut", - "isted", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d9\u0125", - "\u0120Chain", - "\u0120Present", - "\u0120Yin", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b3", - "\u0120Vlog", - "\u0120\u00ec\u0138\u00b4\u00eb\u00a8\u00b8", - "\u0120donn", - "\u0120hitch", - "ucking", - "\u00e3\u0123\u012c\u00e3\u0123\u0126", - "wald", - "risk", - "\u0120hari", - "\u0120Kens", - "\u0120Idol", - "\u0120\u00d0\u00b2\u00d0\u00bd\u00d0\u00b8\u00d0\u00bc\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b5", - "\u0120todd", - "\u0120smashed", - "\u0120invari", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bd\u00d1\u0124\u00d1\u0122", - "\u0120autistic", - "\u00ec\u0140\u00a5\u00eb\u012d\u013a", - "Res", - "\u00d0\u00b4\u00d1\u012d", - "chau", - "\u0120selv", - "\u0120h\u00c3\u00a4tten", - "\u00e0\u00a4\u00bf", - "\u0120expects", - "\u00cf\u0123\u00ce\u00b7", - "\u0120a\u00c3\u00a7\u00c4\u00b1k", - "\u0120HTTP", - "le\u00c5\u0141", - "\u0120sweeping", - "\u0120Beta", - "\u0120counterparts", - "abile", - "\u0120Sims", - "Cs", - "\u0120repar", - "squ", - "\u0120provincial", - "\u0120shareholders", - "\u0120runter", - "\u0120gedacht", - "\u0120Teen", - "\u0120grands", - "\u00e7\u0136\u00a2", - "agles", - "\u0120rocky", - "vens", - "\u0120rivals", - "unal", - "\u0120reacts", - "\u00eb\u00a9", - "\u0120mercury", - "\u0120Luigi", - "\u0120\u00d0\u00be\u00d0\u00b3", - "\u0120JUST", - "\u0120lod", - "\u0120cortex", - "wig", - "\u0120lakh", - "\u00ec\u00a4\u0133\u00ec\u0139\u0132", - "\u0120Vic", - "\u0120Mund", - "\u0120mapped", - "\u0120Dell", - "\u0120Druck", - "\u0120lifes", - "\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be\u00d0\u00b5", - "ividual", - "ad\u00c4\u00b1m", - "\u0120atrav", - "\u0120Flug", - "\u0120Klein", - "\u00ea\u00b1\u00b0\u00ec\u0137\u00bc", - "\u00e0\u00b8\u00ab\u00e0\u00b8\u013b", - "\u0120appli", - "\u00e0\u00ae\u00be?", - "\u00c3\u00bcyorum", - "\u0120\u00d0\u00b8\u00d0\u00bd\u00d1\u0124\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d1\u0123\u00d0\u00bd\u00d0\u00be", - "\u0120disinfect", - ">-", - "\u0120champagne", - "\u0120kla", - "opers", - "Trans", - "\u0120Desert", - "\u0120cultivate", - "\u0120Fucking", - "idelity", - "\u0120\u00d1\u0124\u00d0\u00b0\u00d0\u00bd", - "\u0120incub", - "\u0120temu", - "\u0120learner", - "founder", - "\u0120Syl", - "\u00e3\u0124\u0122", - "\u0120fato", - "zier", - "\u0120\u00ec\u0139\u0128\u00ec\u013f\u00b4", - "\u0120\u00ec\u012a\u00a8", - "\u0120psycho", - 
"\u0120\u00d1\u0124\u00d0\u00b5\u00d0\u00bb\u00d0\u00b5\u00d1\u0126", - "\u0120regarde", - "\u0120representations", - "\u0120litigation", - "\u0120spann", - "ults", - "bior", - "\u00e8\u00a6\u012d\u00e3\u0123\u00a6", - "\u00e4\u00b8\u012f\u00e5\u00a4\u013c", - "\u0120Survey", - "\u0120LEDs", - "\u0120tr\u00c3\u00a4", - "\u0120l\u00c3\u00aan", - "\u0120antioxid", - "\u00d0\u00b5\u00d1\u0122\u00d0\u00be\u00d0\u00bc", - "\u0120induction", - "\u0120fooled", - "\u00c3\u00a4tzlich", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d0\u00b2\u00d0\u00be\u00d1\u0122\u00d1\u0131\u00d1\u0124", - "\u0120Fact", - "umbai", - "\u0120wiggle", - "NOUN", - "\u0120d\u00c3\u00a9velopp", - "\u0120Claro", - "\u0120\u00ec\u00b8", - "\u00eb\u00ac", - "\u00e3\u0123\u00aa\u00e3\u0124\u0135\u00e3\u0123\u0142", - "\u0120accumulate", - "\u0120maintains", - "\u00eb\u0126", - "\u0120Fighter", - "\u00ed\u0128\u0142", - "\u0120matin", - "\u0120coupon", - "\u0120stunt", - "\u0120debuted", - "\u00e5\u00be\u0127\u00e3\u0123\u00a3\u00e3\u0123\u00a6", - "\u0120prag", - "\u00d0\u00b8\u00d0\u00b2\u00d0\u00b0\u00d0\u00b5\u00d0\u00bc", - "73", - "\u0120expres", - "\u0120\u00ec\u013a\u00a4\u00eb\u00b9\u0142", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d1\u0123\u00d0\u00be\u00d0\u00bd", - "\u0120calculus", - "\u0120abrupt", - "\u0120Inspector", - "ourt", - "\u00e6\u0138\u013b", - "\u00c5\u00baniej", - "intense", - "Ba", - "\u0120lounge", - "\u0120asthma", - "\u0120Hi\u00c3\u00a7", - "\u00aa\u00bb", - "\u0120editorial", - "\u0120seize", - "\u0120k\u00c4\u00b1r", - "\u0120mouve", - "\u0120tierra", - "\u0120testosterone", - "\u0120rh", - "\u0120Kingston", - "ELLE", - "\u0120Representative", - "\u01201974", - "\u0120iba", - "Ts", - "\u0120sorta", - "\u0120(?)", - "\u0120\u00d8\u00aa\u00d9\u012a", - "\u0120\u00eb\u0124\u00b4\u00eb\u0142\u00a4", - "\u0120bekommt", - "\u0120spiritually", - "\u0120distorted", - "Mad", - "\u0120reim", - "\u00c3\u00a1nh", - "\u0120Ottoman", - "\u0120Relig", - "\u0120Els", - "\u0120retained", - "\u0120Laughs", - "\u00e6\u0122\u00bb", - "\u0120SAS", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bb\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d0\u00b2\u00d0\u00be", - "\u00d7\u0137\u00d7\u00aa\u00d7\u00a8", - "\u0120innovate", - "\u0120kork", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d1\u0123\u00d0\u00ba\u00d0\u00b0\u00d0\u00b7\u00d1\u012d\u00d0\u00b2", - "ondere", - "ivi", - "aye", - "ounty", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bb\u00d1\u0125\u00d1\u0129\u00d0\u00b0\u00d0\u00b5\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "\u0120buns", - "\u00e5\u0127\u00ab", - "\u0120y\u00c3\u00bczden", - "\u0120surgeries", - "\u00d8\u00a3\u00d9\u0128", - "\u0120bankruptcy", - "welt", - "\u0120siamo", - "\u0120darkest", - "\u0120Hann", - "gga", - "\u0120formas", - "\u0120Dj", - "named", - "\u0120shields", - "ueller", - "\u0120Few", - "\u0120lace", - "\u0120furious", - "\u0120YU", - "\u0120societal", - "\u0120judgement", - "\u0120Dos", - "\u0120jab", - "laws", - "\u0120reinvent", - "\u0120Katherine", - "\u0120Choi", - "adows", - "\u0120rans", - "oden", - "\u0120Midwest", - "n\u00c4\u00b1n", - "\u0120deport", - "\u0120Dip", - "\u00e7\u00b4\u0127", - "\u0120atenci\u00c3\u00b3n", - "\u0120Courtney", - "ividad", - "\u0120\u00da\u00a9\u00db\u0123", - "\u0120efficacy", - "\u0120Brooks", - "\u0120referral", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bd\u00d1\u0128", - "\u0120malicious", - "\u0120kir", - "\u0120Goddess", - "\u0120funky", - "\u0120interim", - "\u0120K\u00c3\u00b6rper", - 
"\u0120\u00ec\u0138\u00bc\u00eb\u00a7", - "kur", - "\u0120\u00d0\u00ba\u00d0\u00bb\u00d0\u00b8", - "\u0120trucs", - "gesetz", - "\u0120zug", - "\u0120Gl\u00c3\u00bcck", - "\u0120Minute", - "\u0120prestigious", - "\u0120niez", - "\u0120concentrations", - "\u00d0\u00bb\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d0\u00b8", - "\u0120Sis", - "\u0120Vitamin", - "kov", - "\u0120PBS", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00b5", - "\u0120retailers", - "\u0120conventions", - "\u0120Samantha", - "\u0120proudly", - "Jordan", - "\u0120JASON", - "atk", - "\u0120triste", - "\u0120st\u00c3\u00a4r", - "\u0120reiterate", - "\u0120posterior", - "\u01201973", - "\u0120Pine", - "\u0120Juliet", - "\u0120pedir", - "kil", - "\u0120overlapping", - "\u0120exclude", - "\u0120econ\u00c3\u00b3m", - "\u0120accepts", - "\u0120Ster", - "\u00e6\u00b1\u00ba", - "\u0120\u00ec\u013c\u00b4\u00eb\u0131\u013b", - "estab", - "\u0120tug", - "arg", - "\u0120livro", - "\u00d8\u00a7\u00d8\u00b5", - "\u0120seams", - "\u0120buraya", - "\u0120ello", - "\u0120TM", - "\u0120Paw", - "\u0120Index", - "Exc", - "\u0120inspirational", - "\u0120dunk", - "\u00e8\u00b0\u0123", - "akter", - "\u0120conditioner", - "\u0120Salut", - "\u00c5\u0124ec", - "\u0120\u00ec\u012b\u00bd", - "\u0120\u00d1\u0125\u00d0\u00b7\u00d0\u00bd\u00d0\u00b0", - "\u0120Romeo", - "fruit", - "\u0120YO", - "\u0120ch\u00e1\u00bb\u012b", - "\u00d0\u00b1\u00d1\u0125", - "bons", - "\u0120reproductive", - "\u0120orada", - "\u0120\u00ed\u013c\u00a8", - "\u0120tentar", - "\u0120ma\u00c3\u00b1ana", - "\u00e3\u0124\u00ac", - "\u0120solvent", - "Jessica", - "\u0120Legal", - "\u0120tua", - "\u0120sic", - "\u0120EQ", - "aukee", - "\u00ec\u012d\u013e\u00eb\u012d\u00a4", - "\u0120\u00c5\u0140u", - "\u0120adhere", - "\u0120Tul", - "\u0120\u00e0\u00ae\u0128", - "\u0120textbooks", - "\u0120Fifth", - "\u0120experi", - "\u0120chic", - "\u0120heap", - "inely", - "atra", - "Two", - "\u0120helemaal", - "\u0120fren", - "\u00e6\u0130\u00a8", - "\u0120bisher", - "\u00d8\u00a7\u00d8\u00b4", - "\u0120\u00ec\u0126\u0142\u00ec\u0125\u013f", - "\u0120Tages", - "\u0120s\u00e1\u00bb\u00b1", - "\u0120bullied", - "\u00d8\u00a4", - "\u0120benefited", - "\u0120Previously", - "\u0120\u00d1\u012f\u00d1\u0126\u00d1\u0126", - "\u00d9\u012f", - "\u0120senate", - "\u0120Morm", - "ijke", - "\u0120Flu", - "\u0120incorporating", - "jack", - "\u0120\u00d0\u00bf\u00d0\u00b8\u00d1\u0124", - "\u0120imply", - "\u0120hacks", - "\u0120RICH", - "\u0120\u00d0\u00ba\u00d0\u00b2\u00d0\u00b0\u00d1\u0122", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b5\u00d0\u00ba\u00d1\u0122\u00d0\u00b0\u00d1\u0123", - "\u0120dependency", - "\u0120\u00ec\u013c\u00a9", - "\u0120\u00ec\u00b1\u0127", - "\u0120w\u00c3\u00a4hrend", - "\u0120sulla", - "\u0120Pittsburgh", - "\u0120esempio", - "\u00bc\u00eb\u00a1\u013e", - "prot", - "\u0120Rosen", - "\u0120Independence", - "\u0120parsley", - "iegen", - "\u0120haw", - "\u0120aquell", - "\u0120CAP", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b1\u00d0\u00be\u00d1\u0124\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "\u0120Cliff", - "ionar", - "\u0120securing", - "\u00e6\u012a\u0133\u00e5\u0122\u0133\u00e7\u013c\u0126", - "\u00ce\u00bd\u00ce\u00b5", - "\u0120utilis", - "\u0120coule", - "\u0120Ping", - "\u0120trek", - "\u0120fak", - "\u0120enorme", - "\u0120\u00ec\u012d\u00ab", - "\u00e8\u00ae\u00a9", - "\u0120doubling", - "\u0120\u00d0\u00bd\u00d1\u0122\u00d0\u00b0\u00d0\u00b2\u00d0\u00b8\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "\u0120hed", - "hoven", - "\u0120Standing", - "\u0120m\u00c3\u0143n", - "\u0120Jimin", - 
"\u0120monarch", - "\u0120coke", - "\u0120mr", - "\u0120clic", - "\u00c3\u012f", - "\u0120impeachment", - "\u0120durability", - "\u0120varios", - "\u0120commercials", - "\u0120greetings", - "\u0120Ri", - "\u0120Appreci", - "\u00ec\u0140\u012a\u00eb\u012c\u0136", - "\u0120r\u00c3\u00a9sult", - "\u00c3\u00a9rt", - "\u0120salute", - "\u0120poderia", - "\u0120sunrise", - "veck", - "\u0120reluctant", - "\u0120commissioner", - "\u00e5\u00bf\u00b5", - "\u00c3\u00a2te", - "\u0120Kenny", - "\u0120Siri", - "\u00e3\u0125\u0125\u00e3\u0125\u0139", - "\u0120\u00eb\u012c\u013a", - "\u0120EE", - "\u0120unch", - "\u00d0\u00ba\u00d0\u00be\u00d0\u00bd", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00a5", - "\u0120belts", - "\u0120hass", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d1\u0131", - "\u0120displaced", - "\u0120abra", - "\u00ce\u0143\u00ce\u00bb", - "\u0120scratches", - "\u0120comet", - "\u0120authorization", - "\u0120LLC", - "\u0120produk", - "\u0120rehabilitation", - "\u00e5\u0140", - "\u00d1\u0138\u00d1\u0129", - "uding", - "olit", - "\u0120105", - "\u0120expands", - "\u0120altri", - "\u0120Komment", - "\u0120anf", - "Pl", - "\u0120Mana", - "fed", - "\u0120bri", - "\u0120ora", - "Gs", - "\u0120Gur", - "uckland", - "\u0120junction", - "\u0120ironic", - "\u0120Feed", - "\u0120prakt", - "\u0120Hammer", - "\u012e\u00eb\u0131\u0126", - "\u0120Tracy", - "\u00e7\u00b5\u00b1", - "\u0120Aside", - "\u00d0\u00bd\u00d0\u00b5\u00d0\u00b3\u00d0\u00be", - "\u0120\u00d0\u00b8\u00d1\u0123\u00d0\u00bf\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00b7\u00d0\u00be\u00d0\u00b2\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "\u0120zaj", - "\u0120equitable", - "\u0120curb", - "\u0120\u00e3\u0123\u0135\u00e3\u0124\u012e", - "\u0120derivatives", - "\u0120puppies", - "\u0120Kenneth", - "\u0120Compl", - "igram", - "\u0120Garcia", - ")\"", - "\u0120Harbor", - "estial", - "\u0120\u00e4\u00be\u0128", - "\u0120ers", - "\u00e6\u00b9", - "\u0120unwanted", - "\u0120belang", - "\u00d0\u00b0\u00d0\u00b3\u00d0\u00be", - "emb", - "dos", - "\u0120\u00ec\u013b\u013e\u00eb", - "\u0120Budget", - "\u0120battling", - "\u00d8\u0143\u00d8\u00aa", - "kok", - "\u00d0\u00bd\u00d0\u00b0\u00d1\u0129\u00d0\u00b0\u00d0\u00bb\u00d0\u00b0", - "\u0120plag", - "\u0120cantidad", - "\u0120grupos", - "\u0120plugins", - "lerini", - "\u0120\u00d0\u00b8\u00d0\u00bc\u00d0\u00b5\u00d0\u00b5\u00d1\u0124", - "\u0120sozusagen", - "olics", - "\u0120pueblo", - "\u0120reminis", - "r\u00c3\u00a4n", - "\u0120Morrison", - "\u0120linha", - "\u0120breaths", - "\u0120Taste", - "\u0120enfrent", - "\u0120Docker", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00bd", - "\u0120ethnicity", - "\u0120wob", - "\u0120suffers", - "\u0120transitioning", - "\u0120Range", - "\u00c4\u013bdzy", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d1\u0124", - "\u0120syner", - "\u0120donut", - "\u0120probabilities", - "\u0120Omar", - "Which", - "uish", - "isin", - "\u0120demos", - "\u0120\u00ec\u0142\u0122\u00ea\u00b8\u00b0", - "\u0120\u00eb\u013a\u0133\u00ea\u00b0\u013b", - "\u0120\u00d0\u00b5\u00d0\u00b4\u00d0\u00b8\u00d0\u00bd", - "\u0120cerve", - "\u0120joka", - "IAN", - "\u0120kilometer", - "\u0120horizontally", - "\u0120Bhag", - "\u0120->", - "\u0120Monitor", - "\u0120knowledgeable", - "\u0120fav", - "\u0120pinned", - "\u0120eBay", - "icker", - "\u0120\u00ec\u0140\u0142\u00ea\u00b9\u0132\u00eb\u00a7\u012e", - "\u0120Xiaomi", - "\u0120capit", - "\u0120np", - "\u01201965", - "hoe", - "\u0120nok", - "\u0120Sage", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00bb\u00d1\u012e\u00d0\u00b7\u00d1\u0131", - "\u0120Tow", - "gam", - 
"\u0120dicen", - "\u0120SUBSCRIBE", - "\u0120reboot", - "\u0120paj", - "\u0120\u00eb\u00b3\u00b4\u00ec\u0139\u00ac\u00eb", - "\u0120thicken", - "\u0120Reality", - "id\u00c3\u00a4n", - "Na", - "\u0120\u00ea\u00b2\u0125\u00ec\u013f\u0122", - "!!)", - "\u0120routines", - "\u0120\u00d0\u00be\u00d0\u00b4\u00d0\u00bd\u00d0\u00be\u00d0\u00b3\u00d0\u00be", - "\u0120exting", - "\u0120\u00ec\u00a6\u013f", - "\u0120sulfur", - "\u0120carve", - "\u0120asteroid", - "\u0120Warrior", - "\u0120photographers", - "\u0120pell", - "\u0120crossover", - "\u00e6\u012a\u0133\u00e7\u0141\u00a5\u00e9\u0123\u0135", - "\u0120hacemos", - "\u0120Nej", - "\u0120settling", - "\u0120irm", - "\u0120Books", - "ient\u00c3\u00b4t", - "\u0120espacio", - "\u0120Scholars", - "\u0120doomed", - "\u0120IRS", - "wohl", - "\u0120segue", - "\u0120\u00eb\u012a\u0126\u00ea\u00b0\u0122", - "\u0120pratic", - "BT", - "\u0120Considering", - "\u0120Buffalo", - "\u0120trainings", - "\u0120gebru", - "\u0120Gleich", - "\u0120pirates", - "\u0120envelop", - "\u0120reopen", - "imat", - "\u0120tee", - "\u0120sued", - "feh", - "\u0120\u00d7\u0136\u00d7\u00a7", - "\u0120diets", - "\u0120juntos", - "asto", - "\u0120misunderstood", - "\u0120ruim", - "\u0120classify", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b4\u00d1\u0125\u00d0\u00ba", - "\u0120inse", - "\u0120illustrated", - "\u0120corrosion", - "\u0120accred", - "\u0120Auntie", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00b2\u00d0\u00b5\u00d1\u0124", - "\u0120LIVE", - "\u0120rek", - "\u0120receipt", - "\u00e5\u012a\u00b0\u00e5\u00ba\u0137", - "\u0120Barbie", - "\u0120Snake", - "turn", - "Jeff", - "\u00e3\u0123\u012c\u00e3\u0123\u012c", - "\u0137\u0126", - "VOICEOVER", - "coll", - "\u0120runners", - "\u00ec\u0142\u013e\u00eb", - "osos", - "moon", - "\u0120keynote", - "\u0120Instit", - "SPEAK", - "\u0120plugs", - "\u0120curv", - "\u0120Yuri", - "\u0120Theres", - "\u0120Ps", - "\u0120\u00ce\u00bc\u00cf\u0122\u00ce\u00bf", - "\u0120converter", - "\u0120refine", - "\u0120badass", - "\u0120\u00ce\u00bf\u00ce\u00b9", - "\u0120regen", - "azzi", - "\u00d9\u012c\u00d9\u0123", - "\u0120seized", - "\u0120i\u00c3\u00a7er", - "ilee", - "\u0120upstream", - "\u0120buds", - "\u0120pim", - "\u0120\u00ed\u0137\u013a\u00eb\u00a3\u00a8", - "\u0120alluded", - "\u0120themed", - "\u0120consisting", - "\u0120bons", - "unuz", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b2\u00d0\u00be\u00d0\u00b4", - "\u0120Lovely", - "\u00e0\u00a5\u012d", - "\u0120parach", - "\u0120Staats", - "\u00e9\u013c\u012c", - "\u0120selective", - "\u0120fase", - "\u0120Georget", - "\u0120cocaine", - "\u0120reproduction", - "\u0120Lara", - "\u0120LD", - "\u0120gh", - "Jon", - "\u0120l\u00c3\u00a5", - "\u0120\u00eb\u0133\u0132\u00eb", - "\u0120typed", - "\u0120Bana", - "\u00eb\u0135\u013e\u00eb", - "\u0120savory", - "\u0120Zomb", - "standen", - "\u0120pedestrian", - "\u0120diff\u00c3\u00a9rents", - "\u0120\u00ec\u012d\u00b8", - "\u00e8\u012b\u00af", - "\u0120complained", - "\u00e7\u00a6\u0131", - "\u0120\u00d0\u013c\u00d1\u0124\u00d0\u00be", - "\u0120\u00d7\u013e\u00d7\u00a4", - "ali\u00c5\u013dmy", - "\u0120mortar", - "\u0120verdict", - "\u0120suficiente", - "\u0120Million", - "mittel", - "inals", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00ae", - "\u00d0\u00b0\u00d1\u0130\u00d1\u0123\u00d1\u012e", - "\u0120mi\u00c4\u013bdzy", - "\u0120Ole", - "\u0120invert", - "czy\u00c4\u0129", - "\u00d0\u00be\u00d0\u00b7\u00d0\u00bc\u00d0\u00be\u00d0\u00b6\u00d0\u00bd\u00d0\u00be", - "starter", - "\u0120auditor", - "\u0120Scout", - 
"chien", - "\u0120Sverige", - "uffled", - "\u0120zehn", - "\u0120Auckland", - "\u0120argent", - "\u01201976", - "\u0120Hoe", - "\u0120bothers", - "\u0120socialist", - "\u0120pliers", - "\u0120emergen", - "\u0120XP", - "\u00d0\u00b5\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "More", - "\u0120Levi", - "\u0120Anders", - "ibilidad", - "\u0120Parents", - "\u0120induced", - "\u00ec\u0138\u00b4\u00ec\u00a4", - "\u0120balances", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d1\u012a", - "\u0120submarine", - "Start", - "\u0120dries", - "\u0120volver", - "\u0120ticking", - "cott", - "\u0120faj", - "pr\u00c3\u00a9s", - "\u0120Sabb", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d1\u0129", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00ba\u00d1\u0125\u00d0\u00bf", - "\u0120baptized", - "\u0120Brilliant", - "\u0120\u00d0\u0133\u00d0\u00be\u00d0\u00b3", - "\u0120mots", - "bits", - "\u0120lattice", - "\u00e6\u012a\u0133\u00e8\u00b7\u0141\u00e4\u00bd\u0142", - "\u0120coriander", - "\u0120residency", - "ync", - "\u0120pierwszy", - "\u0120Knock", - "\u0120Zap", - "\u0120\u00d0\u0137\u00d0\u00b2", - "\u00ea\u00b2\u00ac", - "\u00e5\u00b0\u0131\u00e5\u00bf\u0125", - "\u0120uneven", - "\u0120Jas", - "odor", - "\u00e7\u00bf\u0134", - "74", - "\u0120Site", - "\u0120aconteceu", - "ympt", - "\u0120trilogy", - "\u0120lantern", - "\u0120Zucker", - "vari", - "welling", - "\u0120Potato", - "gomery", - "\u0120reacted", - "\u0120Chron", - "\u0120jede", - "beeld", - "\u0120twent", - "\u0120lact", - "\u00e6\u00a8\u0124", - "\u0120r\u00c3\u00a9se", - "\u0120relent", - "\u0120furnace", - "\u0120widget", - "\u0120earthquakes", - "\u0120Adjust", - "ilit", - "\u0120\u00d8\u00a3\u00d9\u012a", - "\u0120hearings", - "\u0120defendant", - "irsiniz", - "\u0120bask", - "cja", - "\u013e\u00a8", - "\u0120rifles", - "\u0120instal", - "\u0120Forgive", - "pical", - "\u0120\u00d0\u0140\u00d1\u0129\u00d0\u00b5\u00d0\u00bd\u00d1\u012e", - "\u0120petites", - "\u0120hp", - "\u0120renowned", - "\u0120Inn", - "\u0120\u00ec\u00a3\u00bc\u00ec\u0126\u00b8\u00ec\u013c\u0136", - "\u0120emphasized", - "\u00e9\u0139\u00ae\u00e9\u00a2\u013a", - "\u0120\u00ec\u0140\u012a\u00ec\u00a3\u0142", - "\u0120\u00ea\u00b2\u0125\u00ec\u013e\u00bc\u00eb\u00a1\u013e", - "\u00e3\u0124\u0128", - "\u00c5\u0135", - "gili", - "Dave", - "\u0120exhausting", - "\u00c5\u0124ug", - "\u0120schema", - "\u00ce\u00bc\u00ce\u00ac", - "cycl", - "\u0120autant", - "\u0120parcel", - "\u0120materia", - "\u0120Berry", - "\u0120\u00d1\u0123\u00d0\u00b0\u00d0\u00bc\u00d0\u00b8", - "\u0120extracted", - "\u0120Saying", - "ismatic", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b1", - "\u0120neuron", - "graph", - "\u013e\u00eb\u00a9\u00b4", - "\u0120enclosure", - "\u0120Johann", - "\u0120aftermath", - "\u00d1\u0124\u00d0\u00be\u00d0\u00b1", - "\u0120u\u00c5\u00bcy", - "\u0120samp", - "360", - "\u0120Mei", - "\u0120taco", - "\u0120receptors", - "\u0120punches", - "\u0120Hoje", - "\u0120\u00d9\u0129\u00d9\u0128\u00d8\u00a7", - "=\"#", - "\u0120Angular", - "\u0120musique", - "\u0120rol", - "\u0120\u00c3\u00b1", - "sterreich", - "\u0120clam", - "\u0120Treasury", - "chemical", - "\u0120apar", - "\u0120append", - "\u0120forbid", - "\u0120Hamburg", - "\u00d0\u00b0\u00d0\u00ba\u00d0\u00be\u00d0\u00b2", - "\u0120\u00ea\u00b8\u012a", - "ilda", - "\u0120preparations", - "\u0120mog\u00c4\u0127", - "\u0120camino", - "Eric", - "\u0120Blind", - "\u00e8\u012a\u0129", - "\u00e5\u00b9\u00b4\u00e7\u013c\u0126", - "\u0120Discovery", - "\u00ec\u00b8\u0142", - "\u00e7\u012a\u00b6", - "\u0120interpreter", - 
"\u0120bred", - "\u0120Psalm", - "\u0120defended", - "\u00ec\u012b\u00ac", - "\u0120Erfahr", - "\u0120Peach", - "\u0120moons", - "\u0120Ost", - "\u0120sp\u00c3\u00a9cial", - "\u0120arriver", - "\u0120Wis", - "uci", - "\u0120robotics", - "IVE", - "\u0120siege", - "arla", - "\u0120separates", - "\u0120TC", - "\u00ed\u0131\u00b0", - "quisite", - "\u0120parentheses", - "\u00d0\u00b8\u00d0\u00ba\u00d0\u00b5", - "\u00e7\u00ab\u013b", - "\u0120trous", - "\u00e5\u00bb\u00ba", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d0\u00bb\u00d1\u012e", - "\u0120beers", - "\u0120\u00d0\u00bf\u00d0\u00bb\u00d0\u00b0\u00d1\u0124", - "\u00e3\u0123\u013b\u00e3\u0123\u0136\u00e3\u0123\u0126", - "\u0120sola", - "\u0120d\u00c3\u00a8s", - "mingham", - "ikte", - "\u0120oops", - "\u0120twitch", - "\u00e5\u00b0\u0129", - "\u00cf\u012a", - "\u0120Shouldn", - "uvre", - "\u0120leer", - "criptions", - "\u0120eyeshadow", - "\u0120Guo", - "\u0120Powell", - "\u0120supuesto", - "\u0120ana", - "rals", - "\u0120Montreal", - "\u0120surfing", - "\u0120\u00d0\u0141\u00d0\u00b5\u00d1\u0122\u00d0\u00b2", - "\u00d7\u0140\u00d7\u0137", - "\u0120milliseconds", - "\u0120suburbs", - "\u0120planeta", - "\u00d1\u0125\u00d1\u012a\u00d0\u00ba\u00d0\u00b0", - "hrlich", - "\u0120HY", - "\u0120\u00d8\u00b3\u00db\u0134", - "\u0120MM", - "\u0120Eff", - "\u00e5\u0131\u00af\u00e6\u0126\u013d", - "\u0120HS", - "anson", - "\u0120\u00ec\u00a7\u0123\u00ec\u0142\u0133", - "\u0120suo", - "\u0120deploying", - "\u0120kunt", - "tering", - "\u0120erect", - "\u00ec\u0140\u00a5\u00ec\u013f\u00b4", - "\u0120\u00ec\u013f\u012e\u00ec\u012d\u013f", - "\u0120specimen", - "!...", - "\u00e6\u012a\u0133\u00e8\u00aa\u00aa", - "\u0120ligne", - "\u0120konst", - "adequ", - "\u0120\u00ec\u0125\u0123\u00ed\u0125\u013e", - "\u0120accessed", - "\u0120Pole", - "kill", - "\u0120\u00eb\u00b2\u0126\u00eb", - "\u0120authenticity", - "\u0120appelle", - "ulle", - "\u0120revision", - "\u0120goats", - "\u00d0\u00b3\u00d0\u00bb\u00d0\u00b8", - "\u0120pau", - "\u0120Ranger", - "\u0120Imag", - "author", - "\u0120eve", - "\u0120Messenger", - "\u0120nay", - "\u0120wholes", - "\u00c3\u00a4tte", - "\u0120onwards", - "\u0120Depois", - "\u0120\u00ed\u0133\u013e\u00ed\u013a\u0126", - "\u0120SARS", - "\u0120wszystkich", - "\u0120destru", - "umbing", - "\u0120compatibility", - "\u0120misinformation", - "odore", - "\u0120Favor", - "eko", - "\u0131\u012e", - "waukee", - "\u0120Teaching", - "\u0120KO", - "\u0120betting", - "\u0120quests", - "\u0120vivre", - "\u0120\u00d0\u00bc\u00d1\u0125\u00d0\u00b7\u00d1\u012d", - "\u0120saga", - "\u0120swell", - "\u0120gehe", - "\u00e6\u0122\u0130\u00e9\u00ba\u00bc\u00e6\u00a8\u00a3", - "\u0120\u00d0\u00be\u00d1\u0122\u00d0\u00b3\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b7", - "\u0120gide", - "\u0120Gross", - "\u0120dalej", - "\u0120claws", - "\u00e1\u00bb\u013bc", - "\u0120prejudice", - "\u0120insign", - "ihood", - "\u0120pled", - "\u0120d\u00c3\u00b3nde", - "\u0120Political", - "\u0120premises", - "undert", - "\u00d8\u00b9\u00d8\u00aa", - "onnen", - "\u0120espa\u00c3\u00a7o", - "\u0120f\u00c3\u00a9", - "\u0120Harrison", - "\u0120Census", - "\u0120cardio", - "\u0120diy", - "\u0120milieu", - "\u0120journ\u00c3\u00a9e", - "\u0120Release", - "NIE", - "\u0120Muk", - "id\u00c3\u00a9e", - "\u00e1\u00bb\u012fi", - "\u0120i\u00c3\u00a7inde", - "\u0140\u013b", - "\u0120resonate", - "\u0120moles", - "\u0120Flying", - "\u0120Gloria", - "\u0120Pastor", - "\u0120Arena", - "\u00e5\u00a5\u00bd\u00e4\u00b8\u012f\u00e5\u00a5\u00bd", - "NON", - 
"\u00d0\u00be\u00d0\u00bb\u00d0\u00be\u00d0\u00b2", - "\u0120all\u00c3\u0143", - "omat", - "\u00ec\u0138\u00b4\u00eb\u0131\u0126", - "\u0120caracter\u00c3\u0143st", - "\u0120declining", - "\u00d1\u0138\u00d1\u0131", - "anco", - "\u0120Inform", - "\u0120bargain", - "\u0120bushes", - "\u0120Naturally", - "\u0120rechts", - "\u0120Tensor", - "\u0120Patricia", - "\u0120principio", - "\u0120Mumbai", - "\u0120womb", - "\u0120nostra", - "\u0120dilemma", - "\u0120irgendwann", - "\u01201964", - "\u0120energ\u00c3\u0143a", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0122", - "\u0120segregation", - "\u0120Athlet", - "\u0120\u00c2\u00bb,", - "\u0120yeni", - "\u0120Seit", - "\u0120venom", - "\u0120dakika", - "\u0120\u00eb\u0131\u012e\u00eb", - "\u0120\u00c3\u012bl", - "\u0120fus", - "\u0120Mog", - "\u00a6\u00bd\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120remar", - "\u0120Teddy", - "\u0120breasts", - "icans", - "\u00e6\u0136\u00b6\u00e7\u013e\u012d", - "kap", - "\u0120h\u00c6\u00a1n", - "\u0120JP", - "\u00e3\u0125\u00b3\u00e3\u0124\u00bf", - "\u0120resurrect", - "\u0120\u00ec\u013f\u00b8\u00eb", - "herical", - "\u0120fotograf", - "\u0120Jos\u00c3\u00a9", - "\u0120livelihood", - "\u0120bibli", - "teri", - "\u0120vorstellen", - "\u0120AAA", - "\u0120assessing", - "YA", - "\u0120splend", - "\u0120excav", - "\u0120baptism", - "yll", - "wow", - "Mac", - "\u0120plastics", - "teokbokki", - "\u0120int\u00c3\u00a9ressant", - "\u0120commanded", - "\u0120famously", - "\u0120\u00d0\u013a\u00d0\u00bb\u00d0\u00b8", - "\u0120Manuel", - "\u0120southwest", - "\u0120deformation", - "\u00c3\u0143culo", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0127\u00d0\u00be\u00d0\u00b4\u00d0\u00b8\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "\u0120Patter", - "degree", - "\u0120cz\u00c4\u013bsto", - "\"-", - "\u0120\u00ec\u0127\u012d", - "\u0120manger", - "\u0120Trustee", - "\u0122\u00eb\u00a6\u00ac", - "\u0120puntos", - "ivable", - "\u0120volatile", - "\u0120\u00eb\u012c\u0132", - "\u0120instability", - "\u0120ciel", - "ci\u00c4\u0127", - "\u0120purity", - "\u00d0\u00bd\u00d0\u00be\u00d1\u0123\u00d1\u0124", - "Sil", - "edar", - "\u00e5\u013b\u00a8", - "NOUNCER", - "\u0120spelled", - "GER", - "\u0120sanctuary", - "\u0120accelerating", - "\u0120scout", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b5\u00d0\u00b2", - "fahren", - "\u00e3\u0123\u0135\u00e3\u0123\u00a1\u00e3\u0124\u012b", - "\u0120\u00eb\u0124\u013a\u00ec\u013a\u00a8", - "\u0120pocz\u00c4\u0127t", - "\u0120Meu", - "kaar", - "\u00b3\u00b4\u00ea\u00b3\u0142", - "akra", - "Down", - "\u0120\u00c3\u0126r", - "\u0120Elite", - "\u0120allons", - "\u0120mayonnaise", - "\u0120Sustain", - "prisingly", - "\u0120supervis", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0142\u0129\u00ec\u00a3\u0142", - "\u0120unemployed", - "\u0120freshly", - "\u0120\u00d7\u0140\u00d7\u00a2", - "\u0120Dh", - "\u0120tackling", - "\u0120ogr", - "\u0120\u00ec\u00b4\u012a\u00eb", - "\u00e3\u0124\u012a\u00e3\u0124\u012f", - "\u0120loft", - "arah", - "\u0120Airl", - "\u0120Dir", - "\u0120\u00d0\u013e\u00d0\u00be\u00d0\u00b6\u00d0\u00bd\u00d0\u00be", - "\u0120booking", - "\u0120CRA", - "\u0120https", - "\u0120choke", - "\u0120gown", - "\u0120noite", - "\u0120zac", - "istol", - "\u0120secre", - "\u0120resembles", - "\u0120cuad", - "\u00ec\u0124\u00ac\u00ea\u00b0\u0122", - "show", - "\u0120blanc", - "\u0120agu", - "\u0120Print", - "asted", - "\u0120Weather", - "ipl", - "\u0120obscure", - "\u0120conte", - "oughs", - ");", - "\u0120Dame", - "\u00e4\u00b8\u0122\u00e7\u013d\u00b4", - "\u0120clarification", - "\u0120intimacy", - 
"\u0120uphold", - "\u0120Mirror", - "\u0120wagon", - "xide", - "\u0120clog", - "apper", - "\u0120Immediately", - "\u00c3\u00bade", - "\u0120touchdown", - "\u0120rooft", - "\u00d0\u00b0\u00d1\u012a\u00d0\u00b0", - "\u0120\u00c3\u00a7\u00c4\u00b1kt", - "\u0120laisser", - "\u0120Unreal", - "ensitive", - "\u0120123", - "\u0120plaster", - "\u0120ducks", - "\u0120etme", - "\u0120bishop", - "brevi", - "\u0120bic", - "\u00e4\u00b8\u012d\u00e5\u0130\u00bb", - "\u0120runtime", - "\u0120ambitions", - "\u00d0\u00bc\u00d0\u00b0\u00d1\u0124", - "\u0120Wein", - "\u0120Mari", - "\u0120\u00ed\u012c\u00b8\u00eb", - "\u0120resolver", - "\u0120ng\u00c3\u0142y", - "\u0120Rise", - "\u00e3\u0124\u012a\u00e3\u0123\u0128\u00e3\u0123\u00ab", - "\u0120Crus", - "\u0120merchandise", - "\u0120eli", - "\u0120statewide", - "\u0120owl", - "\u00e9\u0123\u0142", - "\u00e6\u0136\u00b9", - "\u0120twisting", - "\u0120contaminated", - "\u0120Commerce", - "hythm", - "\u0120\u00c3\u012a", - "\u0120\u00ec\u012d\u00a4\u00eb", - "\u0120musste", - "uir", - "\u0120sums", - "\u0120Somewhere", - "\u00e3\u0125\u0130", - "\u0120kami", - "\u0120aired", - "\u0120ANDREW", - "\u0120\u00ea\u00ba", - "\u0120viendo", - "\u0120antibody", - "\u0120absolument", - "\u0120protesters", - "\u0120Qu\u00c3\u00a9bec", - "stadt", - "Shaun", - "\u0120chambers", - "\u0120Wear", - "\u0120Effects", - "\u0120hazards", - "\u0120nei", - "\u0120coraz\u00c3\u00b3n", - "\u0120\u00e1\u00bc", - "\u0120SG", - "\u0136\u00a9", - "\u0120\u00ec\u0139\u0143\u00ec\u012d\u013e", - "\u0120comfy", - "\u0120Cody", - "\u0120pensando", - "\u0120ganska", - "\u0120Across", - "\u00c3\u00b6llig", - "abyte", - "\u0120wedge", - "\u0120kalian", - "\u0120sigue", - "endes", - "\u0120Gro\u00c3\u0141", - "\u0120utiliser", - "\u0120flown", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d1\u0130", - "\u0120levar", - "restrial", - "\u0120illustrations", - "\u0120asl\u00c4\u00b1nda", - "BLEEP", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d1\u0123\u00d1\u0124", - "\u0120turret", - "\u0120suitcase", - "zi\u00c4\u013bki", - "\u0120sketches", - "\u0120acred", - "\u0120Rei", - "\u0120tsun", - "\u0120Sag", - "\u0120thirds", - "\u0120KIRBY", - "rai", - "\u0120humanos", - "\u0120recommends", - "\u0120extraordinarily", - "\u0120commencement", - "KN", - "opez", - "\u0120\u00d7\u0133\u00d7\u00a9", - "\u0120lethal", - "\u0120Estamos", - "\u0120inspector", - "\u0120Seok", - "eun", - "\u0120offshore", - "\u0120gettin", - "years", - "\u0120Silence", - "\u0120Natur", - "upun", - "\u0120trzy", - "\u0120noget", - "\u0120hamburger", - "\u0120Praise", - "\u00c3\u00a9nd", - "\u01201971", - "ylie", - "krit", - "\u0120\u00ec\u0125\u013f\u00ea\u00b0\u0123\u00ec\u013f\u00b4", - "\u00e7\u013c\u00ae", - "\u0120momentos", - "\u0120est\u00c3\u00a9", - "\u0120dissemin", - "\u0120gigs", - "\u0120desaf", - "\u0120avis", - "\u0120Zoo", - "\u0120\u00ec\u0137\u012c\u00ec\u013f\u0122", - "h\u00c3\u00a4ng", - "\u00e5\u0131\u00a5", - "hake", - "\u0120Bism", - "\u0120rethink", - "\u0120Malcolm", - "\u0120identifies", - "lower", - "ixel", - "\u0120tv\u00c3\u00a5", - "ked", - "ierz", - "\u0120\u00c3\u00b6ffentlich", - "\u0120proclaim", - "soon", - "lol", - "\u0120loi", - "\u0120bitten", - "rollo", - "\u0120sermon", - "\u0120esqu", - "\u0120jackets", - "\u0120gr\u00c3\u00a1fic", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00ba\u00d0\u00b0\u00d0\u00b7\u00d1\u012d\u00d0\u00b2", - "\u0120cabeza", - "chodzi", - "\u0120pelvis", - "\u0120nostalgia", - "\u0120brew", - "\u0120shortcuts", - "\u0120Adem\u00c3\u00a1s", - "\u0120superficial", - 
"\u00e5\u0127\u00a9\u00e5\u0122\u012d", - "\u0120boca", - "\u0120\u00e6\u012a\u0133\u00e6\u013a\u00af", - "imentos", - "\u00e5\u013d\u0142\u00e4\u00b8\u00ba", - "\u0120sprouts", - "\u00e9\u00a3\u013d", - "\u0120Jonas", - "\u0120Florence", - "static", - "daughter", - "*)", - "\u00c5\u0124by", - "fashion", - "\u0120Ginger", - "\u0120\u00eb\u00a7\u00a4\u00eb", - "\u0120hustle", - "utos", - "\u0120\u00d1\u0124\u00d1\u0131\u00d0\u00b6", - "\u0120L\u00c3\u00b6s", - "\u00d7\u00a9\u00d7\u013b\u00d7\u013f", - "anych", - "tuber", - "\u0120tidy", - "\u0120frontal", - "\u0120whiskey", - "\u0120humid", - "\u0120\u00ce\u0141", - "\u0120ridge", - "\u0120marin", - "\u0120bient\u00c3\u00b4t", - "\u0120Carrie", - "chw", - "\u0120tahun", - "\u0120Ergeb", - "FR", - "\u0120\u00ec\u0142\u0137\u00eb\u00b6\u0122", - "\u0120Soldier", - "\u0120enlightenment", - "\u0120examining", - "\u0120Notre", - "\u0120eram", - "\u0120Sunny", - "\u0120layered", - "\u0120Dazu", - "rades", - "\u00e5\u00a5\u00bd\u00e5\u0132\u0125", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u012a\u00d0\u00b5\u00d0\u00b9", - "\u0120timber", - "\u0120manners", - "\u0120Birmingham", - "\u0120miniature", - "ometers", - "\u0120filler", - "\u0120Rip", - "\u0120Komb", - "owner", - "\u00ec\u00bf", - "idian", - "\u0120dem\u00c3\u00a1s", - "\u0120\u00d9\u012a\u00d8\u00aa", - "\u0120precautions", - "\u0120governo", - "zelf", - "\u0120Complete", - "\u00e5\u00b8\u0125", - "\u0120Phantom", - "\u00e3\u0123\u00be\u00e3\u0123\u013c", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00b7", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d1\u0122\u00d1\u0124", - "\u0120Antwort", - "\u0120Pfizer", - "\u0120Franco", - "\u0120w\u00c5\u0124", - "\u0120frig", - "esper", - "\u0120kale", - "\u0120filmmaker", - "\u0120kurt", - "\u0120invalid", - "\u00e5\u00b1\u0122", - "arella", - "\u00c4\u0125ng", - "ramento", - "\u0120nutritional", - "\u0120dictators", - "\u0120afin", - "\u0120fuzzy", - "\u0120Gina", - "\u00c3\u00b3t", - "\u0120Extremadura", - "\u0120demonstrations", - "\u0120Montgomery", - "\u00ed\u0137\u00b4\u00ec\u0126\u00a4", - "\u0120Gandhi", - "\u00e3\u0125\u013f", - "\u00e7\u00bd\u00ae", - "\u0120reunion", - "\u0120jaki\u00c5\u013d", - "\u0120Zug", - "OUGH", - "lifting", - "\u0120\u00e0\u00b2", - "\u00e1\u00b9\u013d\u00e1\u00b9\u00a3", - "eb", - "\u0120WOW", - "\u0120Shiva", - "ometry", - "\u0120wildly", - "\u0120tended", - "\u0120megap", - "\u00ec\u00b2\u013a", - "\u0120nause", - "\u0120gerek", - "\u00e3\u0125\u012d", - "\u0120Marcel", - "\u0120neste", - "\u00d8\u00ae\u00d8\u00b1", - "\u0120feh", - "\u00e5\u0128\u0127", - "suspenseful", - "\u0120Wrestle", - "\u0120Palestinians", - "\u0120GORD", - "iyet", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b4\u00d0\u00b8", - "\u0120versuchen", - "\u0120transistor", - "\u0120\u00d0\u0141\u00d1\u0122\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00be", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bd\u00d1\u0122\u00d0\u00b0\u00d0\u00b2", - "\u0120rhyme", - "\u0120Vermont", - "platz", - "\u00e8\u00ae\u00b0", - "\u0120\u00c4\u00b0\u00c5\u0141te", - "\u0120Hag", - "\u0120\u00d0\u013a\u00d0\u00bc", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d1\u0123\u00d0\u00ba\u00d0\u00b0\u00d0\u00b7", - "\u0120metros", - "\u0120Infinity", - "wolf", - "ibal", - "ftig", - "\u0120\u00da\u0128", - "\u0120\u00ed\u013a\u00b9\u00ec\u012d\u013e", - "\u0120oggi", - "\u0120disposit", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bb", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d0\u00bf\u00d0\u00be\u00d0\u00bb", - "\u0120th\u00c3\u00b4i", - "\u0120KENN", - "\u0120handing", - 
"actus", - "\u0120tacos", - "\u0120formerly", - "\u0120Corinthians", - "\u00e3\u0123\u00ab\u00e3\u0123\u00af", - "\u00d1\u0128\u00d1\u0138\u00d1\u0139", - "\u0120padre", - "\u0120congregation", - "\u00e6\u0133", - "fert", - "\u0120subir", - "aiser", - "qua", - "araoh", - "\u0120Curry", - "\u0120\u00ec\u0137\u012c\u00eb\u012c\u0136", - "\u00d0\u00b5\u00d0\u00bb\u00d1\u0130", - "\u0120fuss", - "\u0120booty", - "\u0120lows", - "\u0120hommes", - "\u0120MH", - "\u0120Disneyland", - "went", - "\u0120residue", - "\u0120beeping", - "\u00e8\u00bc\u0137", - "\u00c3\u00a4tta", - "\u0120mould", - "\u0120Projekt", - "stalk", - "\u0120artifact", - "\u0120Antrag", - "\u0120AMD", - "\u0120Crypt", - "\u0120\u00eb\u00a9\u0136", - "\u0120Felipe", - "\u0120COB", - "elu", - "\u0120selfies", - "\u0120Santi", - "chutz", - "\u0120\u00d0\u00a3\u00d0\u00ba\u00d1\u0122\u00d0\u00b0\u00d1\u0139", - "gesamt", - "\u0120flock", - "jaz", - "plain", - "\u0120wrinkles", - "\u0120reais", - "\u0120paljon", - "\u0120empowerment", - "\u0120attendees", - "ppa", - "\u0120neden", - "\u00d0\u00be\u00d0\u00bd\u00d1\u012d", - "\u0120timeframe", - "\u0120Cherry", - "\u0120id\u00c3\u00a9e", - "\u0120gag", - "\u0120donkey", - "\u0120\u00c3\u00b4ng", - "\u0120Hare", - "\u00e9\u013c\u013d", - "\u0120Kara", - "\u0120acompan", - "places", - "imientos", - "\u0120Hamm", - "\u00d0\u00b1\u00d0\u00b8", - "uben", - "iliyor", - "\u0120thirst", - "\u0120kry", - "\u0120Georgetown", - "\u00d7\u0142\u00d7\u0136", - "\u0120orch", - "\u0120heartbeat", - "\u0120transformations", - "estones", - "\u0120KH", - "\u0120cartoons", - "\u0120anci", - "\u0120worthless", - "\u0120tailored", - "pu", - "Americans", - "\u0120piles", - "\u0120Monkey", - "\u0120basin", - "\u0120Temper", - "\u0120Paint", - "\u0120punching", - "\u0120baik", - "\u0120Oakland", - "vre", - "\u00c5\u0141allah", - "ydd", - "\u0120casually", - "odu", - "\u0120coded", - "\u0120Norwegian", - "\u0120Vince", - "\u0120premature", - "\u0120Promise", - "\u00d0\u00b5\u00d0\u00ba\u00d1\u0123\u00d1\u0124", - "\u0120devastated", - "\u0120Premium", - "\u0120Param", - "\u0120\u00c3\u0138yle", - "umuz", - "PO", - "rators", - "\u0120lamps", - "\u0120territorial", - "\u0120backbone", - "listed", - "DY", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00b1", - "\u0120pursued", - "\u0120Commons", - "\u0120\u00ea\u00b3\u00a1", - "locks", - "edor", - "\u0120conceived", - "gere", - "\u0120disappearing", - "\u0120Sull", - "\u0120\u00ec\u0139\u00b0\u00eb", - "\u0120hoffe", - "\u0120detox", - "\u00ed\u0136\u012e", - "\u0120retir", - "\u0120\u00eb\u0123\u013f\u00eb\u0124", - "\u0120pergunta", - "\u0120BOY", - "\u00e7\u00b2\u00be", - "\u0120penn", - "\u00e6\u013f\u00a5\u00e4\u00ba\u0128", - "h\u00c3\u00a9s", - "hon", - "\u0120catastrophic", - "\u0120aust", - "\u0120torso", - "\u0120\u00ec\u0138\u00b4\u00eb\u012c\u0132", - "\u0120\u00ec\u0124\u00ac\u00eb\u0140\u012e\u00eb\u0135\u00a4\u00ec\u013f\u00b4", - "\u0120marvelous", - "\u0120Harley", - "achine", - "\u0120ti\u00e1\u00ba\u00bf", - "itto", - "\u0120I\u00c3\u0143m", - "ylon", - "\u0120shutdown", - ".''", - "\u0120apologies", - "\u0120Communication", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d0\u00b2\u00d0\u00be\u00d1\u0122\u00d1\u0130", - "\u00e3\u0123\u0124\u00e3\u0125\u00bc", - "\u00e2\u0126\u00a2", - "\u00c3\u0143veis", - "acun", - "\u0120retaining", - "\u0120contradiction", - "\u0120ADAM", - "COM", - "Bryan", - "\u0120Monsieur", - "\u0120adapting", - "\u00d0\u00a8\u00d0\u0132", - "\u0120Scr", - "\u00c3\u00a4ndert", - "\u0120plaus", - 
"\u00e4\u00bb\u012c\u00e5\u00a4\u00a9\u00e7\u013c\u0126", - "\u0120onset", - "\u0120assistants", - "\u0120valves", - "\u0120scatter", - "\u0120Rust", - "awia", - "\u0120readiness", - "\u0120pais", - "\u0120bible", - "\u0120ambiente", - "\u0120\u00d0\u00b0\u00d0\u00bc\u00d0\u00b5\u00d1\u0122\u00d0\u00b8\u00d0\u00ba", - "\u0120uncond", - "\u0120kalk", - "\u00e5\u012c\u00a8", - "\u0120moc", - "unn", - "\u0120actu", - "\u0120humming", - "issimo", - "\u0120Patrol", - "gow", - "\u00e3\u0125\u00a4", - "\u0120THEY", - "\u0120Boden", - "\u0120Bie", - "\u0120reel", - "\u0120\u00d1\u0125\u00d1\u0123\u00d0\u00bb\u00d0\u00be\u00d0\u00b2", - "\u0120endeavor", - "\u0120Period", - "ustomed", - "mals", - "alon", - "Box", - "\u0120\u00cf\u0125\u00ce\u00b1\u00cf\u0124", - "\u0120omdat", - "\u0120altre", - "\u0120Heh", - "kad", - "\u0120protector", - "\u0120dominance", - "odynamic", - "\u0120communicated", - "k\u00c3\u00b6", - "\u0120predecessor", - "\u0120Luk", - "\u0120Flower", - "\u0120\u00e3\u0123\u00a9", - "poque", - "\u00d1\u0124\u00d0\u00b8\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "\u0120retrospect", - "\u0120decisive", - "\u0120exempel", - "{\\", - "\u0120R\u00c3\u00bcck", - "rite", - "\u0120Zeus", - "\u0120calorie", - "\u0120attractions", - "\u0120Hinter", - "\u0120uhm", - "\u0120\u00ed\u012e\u0132", - "\u0120rulers", - "\u0120discouraged", - "\u0120acontecer", - "\u0120accents", - "\u0120Optim", - "\u0120Alg", - "kids", - "2021", - "\u0120Lindsay", - "\u0120filmmakers", - "prowad", - "\u0120terug", - "\u00eb\u012d\u00b4", - "\u0120Sommer", - "2018", - "\u0120borrowing", - "\u0120Transfer", - "\u00d0\u00bd\u00d0\u00be\u00d0\u00bf", - "arias", - "\u0120headphone", - "\u00ec\u00bc\u013e", - "\u0120translating", - "\u0120aufge", - "\u00e0\u00ae\u00aa\u00e0\u00ae\u0141", - "weis", - "avant", - "paid", - "baby", - "\u0120toughest", - "\u0120repeats", - "\u0120Teresa", - "Lord", - "\u0120acabar", - "\u0120Ride", - "dir", - "\u0120leng", - "\u0120dwa", - "\u0120headaches", - "\u0120n\u00e1\u00bb\u00afa", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d0\u00be\u00d1\u0131\u00d1\u012b", - "\u0120boils", - "\u0120longing", - "rias", - "\u00c3\u00b3rio", - "\u0120Paradise", - "\u0120Se\u00c3\u00b1or", - "erdem", - "\u0120reinst", - "\u0120salaries", - "\u0120insecurity", - "\u00c5\u0124o\u00c5\u013dci", - "\u0120\u00d0\u00b0\u00d0\u00b1\u00d1\u0123\u00d0\u00be\u00d0\u00bb\u00d1\u0130\u00d1\u0124\u00d0\u00bd\u00d0\u00be", - "inken", - "\u0120Eddy", - "udos", - "\u0120dummy", - "\u00d0\u013c\u00d0\u00b0\u00d0\u00ba", - "six", - "\u0120inbox", - "\u00e1\u00ba\u00a9", - "People", - "\u00e1\u00bb\u0135ng", - "\u0120organizers", - "find", - "\u0120\u00c3\u00bcl", - "\u0120COM", - "\u00c5\u00bca", - "weile", - "Commentary", - "\u00ed\u012c\u00b8\u00eb\u00a5\u00bc", - "\u0120Mittel", - "kus", - "\u00e8\u013d\u012d", - "\u00e0\u00a4\u00a8", - "iral", - "\u0120garment", - "\u00ce\u00b9\u00ce\u00ba\u00ce\u00ac", - "\u0120stool", - "payers", - "\u0120shimmer", - "\u0120Ollie", - "\u0120Je\u00c5\u00bceli", - "\u00e8\u00bf\u013a\u00e6\u013e\u012b", - "\u01201977", - "\u0120jeux", - "\u0120extinct", - "\u0120Transportation", - "\u0120Maker", - "\u0120john", - "\u0120richest", - "\u0120traumat", - "\u0120liegen", - "\u00b4\u00eb\u00a5\u00bc", - "\u00e8\u00bf\u013b\u00e9\u0129\u012e", - "\u0120unrest", - "\u0120Straw", - "\u00e6\u012d\u013e\u00e6\u012d\u013e", - "\u0120coma", - "\u0120Kristen", - "\u0120\u00d0\u013c\u00d0\u00be\u00d0\u00bd\u00d0\u00b5\u00d1\u0129\u00d0\u00bd\u00d0\u00be", - "\u0120Bryce", - 
"\u0120\u00d1\u0131\u00d0\u00ba\u00d1\u0138", - "\u0120pearls", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bd\u00d0\u00b8\u00d0\u00bc\u00d0\u00b0\u00d1\u0130", - "\u0120additions", - "\u0120asympt", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd\u00d1\u012e\u00d1\u012a\u00d0\u00b5", - "\u0120scans", - "Child", - "\u0120Hide", - "\u00d0\u00ba\u00d1\u0125\u00d1\u0130", - "etas", - "\u0120dank", - "\u0120pleas", - "\u0120essays", - "\u0120jets", - "\u00e5\u0127\u0134", - "\u0120\u00d0\u00b2\u00d0\u00b5\u00d0\u00b4", - "\u0120positives", - "hof", - "-)", - "zzo", - "\u0120starters", - "\u0120smiled", - "\u01201944", - "quiera", - "\u0120rok", - "\u0120puesto", - "Nico", - "\u0120simulations", - "\u0120\u00e0\u00b6", - "\u0120intrigued", - "\u0120Overwatch", - "\u00e5\u0138\u0124", - "sigh", - "bai", - "\u0120\u00eb\u00a7\u0132\u00ea\u00b3\u0142", - "id\u00c3\u00a9", - "\u0120crabs", - "\u00e1\u00ba\u0143p", - "\u0120Iraqi", - "\u00ec\u013f\u00b4\u00eb\u00a5\u00bc", - "\u00d1\u0124\u00d1\u0131", - "\u0120Sophia", - "\u0120DNS", - "\u0120\u00c3\u00b6nemli", - "\u0120Luo", - "\u013f\u00a4", - "\u0120Counsel", - "ligen", - "\u00d0\u00b0\u00d0\u00bd\u00d1\u012e\u00d1\u012a\u00d0\u00b5", - "\u0120trumpet", - "\u0120dapat", - "\u0120JM", - "\u0120EVERY", - "\u0120\u00e5\u00b0\u012f\u00e4\u00b8\u012f\u00e5\u00b0\u012f", - "\u00e5\u00a4\u00a2", - "\u0120Layer", - "\u0120c\u00c3\u00b4", - "\u00d0\u00bd\u00d0\u00b0\u00d0\u00bb", - "\u0120Joo", - "\u0120Hack", - "\u0120sunt", - "\u0120Leonard", - "\u0120Firebase", - "\u00c3\u00a4nger", - "\u0120exploding", - "voy", - "\u0120\u00ec\u00a6\u0132", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d1\u0122\u00d1\u012e", - "\u0120severity", - "\u0120bestimm", - "\u00e7\u00b5\u0132\u00e6\u0140\u013e", - "\u0120tiring", - "\u0120procurement", - "\u0120diplomacy", - "\u0120decorative", - "\u0120\u00d9\u012c\u00d8\u00a7", - "\u0120penetration", - "\u00d5\u00ab", - "\u0120outright", - "ENE", - "\u0120Uni", - "odles", - "\u0120zeros", - "\u0120delightful", - "jm", - "\u0120dopo", - "\u00e6\u00b2\u00a1\u00e4\u00ba\u012d", - "\u0120positivity", - "\u0120VISTA", - "\u0120Resource", - "\u00ed\u0125\u0122\u00eb", - "\u00d1\u012a\u00d0\u00b8\u00d0\u00b5", - "Carl", - "\u0120piping", - "\u0120chopping", - "\u0120Ganze", - "\u00c3\u00bcss", - "\u0120Ao", - "\u0120shattered", - "\u0120Detective", - "\u0120undoubtedly", - "\u0120halluc", - "\u0120ench", - "\u00d1\u012d\u00d1\u0129\u00d0\u00bd\u00d0\u00be", - "\u00d1\u0125\u00d0\u00bb\u00d1\u0131\u00d1\u0122", - "isesti", - "\u0120pedals", - "\u0120durum", - "\u00a4\u00ed\u0136", - "laimer", - "\u0120propre", - "Cu", - "\u0120translator", - "\u0120ca\u00c5\u0124", - "\u0120\u00ea\u00b7\u00b8\u00ea\u00b1\u00b8", - "\u0120ca\u00c5\u0124y", - "UA", - "\u0120revised", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b4\u00d0\u00be\u00d0\u00b1", - "\u0120Article", - "\u0120Haiti", - "\u0120\u00c3\u0135", - "\u0120Ctrl", - "\u0120rozm", - "lait", - "\u0120letzte", - "ispering", - "display", - "\u0120aluminium", - "\u0120palabras", - "\u0120conocer", - "\u0120zitten", - "\u0120dirig", - "\u00e5\u0131\u00aa\u00e6\u013e\u012b", - "\u0120brainstorm", - "\u0120wifi", - "\u0120Particip", - "\u0120viewpoint", - "\u0120Quan", - "\u0120hierarch", - "Welcome", - "\u00e5\u00af\u00be", - "\u0120offen", - "\u0120Recovery", - "gano", - "Would", - "\u0120repro", - "\u0120perceptions", - "\u0120demasi", - "\u0120Bangladesh", - "\u0120Incredible", - "\u0120letzt", - "\u0120behaving", - "\u0120astonishing", - "\u0120\u00e2\u0128", - 
"\u0120\u00eb\u0124\u00a8\u00ec\u0140\u0132", - "\u00e8\u00b5\u00b0\u00e4\u00ba\u0128", - "\u00e3\u0125\u0136", - "\u0120GORDON", - "CAR", - "?!\"", - "\u0120Prest", - "\u0120\u00eb\u00a7\u0140\u00ec\u0137\u0126\u00ec\u013c\u0136", - "\u0120tand", - "\u0120lash", - "\u00e7\u012c", - "ificant", - "\u0120intoler", - "\u0120\u00d0\u00b3\u00d0\u00b5\u00d1\u0122\u00d0\u00be", - "\u0120teu", - "aso", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b2\u00d0\u00b5\u00d1\u0124", - "\u0120travelers", - "\u0120Synd", - "\u0120\u00d0\u00b2\u00d0\u00b5\u00d1\u0122\u00d1\u0123", - "Fonda", - "ad\u00c4\u00b1", - "\u0120transcription", - "\u0120titanium", - "\u0120twists", - "\u0120gearbox", - "ensation", - "fat", - "Coll", - "\u0120Commonwealth", - "zon", - "\u0120Polizei", - "\u0120APPLAUSE", - "fry", - "\u0120Juda", - "esteem", - "\u0120sock", - "\u0120Jugend", - "\u0120\u00d0\u00ba\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d1\u0124\u00d0\u00b8", - "\u0120Dro", - "\u0120prochaine", - "\u00e3\u0125\u00bc\u00e3\u0125\u00ab", - "\u0120liksom", - "\u0120Energie", - "\u0120Marina", - "\u0120230", - "\u0120\u00ea\u00b0\u0122\u00ec\u0126\u013e", - "umping", - "\u0120lone", - "\u00e7\u00b4\u013c", - "\u0120fonts", - "\u0120businessman", - "\u0120ply", - "\u0120doe", - "grid", - "\u0120Milwaukee", - "\u0120Eden", - "!\".", - "\u0120\u00db\u012e\u00db\u0123", - "ogens", - "\u0120teaser", - "\u0120qui\u00c3\u00a9n", - "\u0120incentiv", - "govern", - "\u0120childcare", - "\u0120sneakers", - "\u0120imprisoned", - "\u00c2\u00ae", - "\u00d0\u00b8\u00d1\u0124\u00d0\u00b5\u00d1\u0123\u00d1\u012e", - "anbul", - "\u0120regain", - "\u0120tranquil", - "Redner", - "\u00e9\u013d\u00a8", - "IFA", - "\u0120ideological", - "\u0120mayor\u00c3\u0143a", - "\u0120bureau", - "eterm", - "\u0120DID", - "\u00ec\u012c\u00b7", - "\u0120waving", - "\u0120beb", - "\u0120\u00c3\u00a1r", - "\u0120\u00d0\u00ba\u00d0\u00b2", - "\u0120envoy", - "anut", - "\u00d0\u00b8\u00d0\u00ba\u00d1\u0125", - "\u0120Environment", - "\u0120Assass", - "\u00e3\u0124\u0135\u00e3\u0123\u00a7", - "\u0120Bread", - "\u0120\u00d0\u00a2\u00d1\u0125\u00d1\u0124", - "\u0120staircase", - "\u0120Disease", - "\u0120aucun", - "\u0120\u00eb\u012d\u012a", - "\u0120confrontation", - "\u01201941", - "\u0120irony", - "\u0120worsh", - "\u00e3\u0124\u012e\u00e3\u0124\u012d", - "\u0120fick", - "\u0120Naomi", - "\u0120backside", - "ieux", - "Kap", - "\u0120vedere", - "\u0120lengthy", - "\u0120breaker", - "\u0120Rolle", - "\u0120predator", - "\u0120nossos", - "\u0120advertise", - "\u00e8\u00b3\u0129", - "\u00d1\u0122\u00d0\u00be\u00d0\u00b4\u00d0\u00b5", - "Rednerwechsel", - "reten", - "\u0120collectors", - "\u00c4\u00b1\u00c4\u0141\u00c4\u00b1m\u00c4\u00b1z", - "\u0120trig", - "\u0120axes", - "inters", - "\u0120penalties", - "\u0120Osman", - "\u0120Jenna", - "\u0120flakes", - "\u0120trainers", - "\u0120stunned", - "\u0120Scroll", - "\u0120Pip", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0123\u00d1\u0124", - "\u0120nh\u00c3\u0142", - "\u0120Smack", - "\u00e1\u00ba\u00abn", - "ratos", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b1\u00d0\u00be\u00d1\u0124\u00d1\u012d", - "\u0120ucz", - "\u0120Lemon", - "\u0120Sind", - "\u0120psychic", - "\u0120Abg", - "\u0120mammals", - "\u0120immersive", - "\u0120bots", - "\u0120verschiedene", - "\u0120geral", - "\u0120follower", - "\u0120\u00e4\u00bb\u0138", - "\u0120seguridad", - "\u0120immersed", - "feito", - "cross", - "\u0120\u00c3\u00b6ld", - "\u00ed\u0125\u0126", - "\u0120\u00e3\u0123\u0135\u00e3\u0123\u00ae", - 
"\u0120\u00d7\u0136\u00d7\u013b\u00d7\u0132", - "\u0120Jian", - "\u0120biliyor", - "area", - "\u0120kaf", - "\u0120godt", - "\u00e7\u013d\u00b8\u00e4\u00bf\u00a1", - "\u0120\u00eb\u00b0\u00a9\u00ec\u0128\u00a1", - "\u0120detriment", - "\u00e6\u00a5\u013c", - "\u00d1\u0138\u00d0\u00bb", - "\u0120\u00c4\u0133\u00c3\u00a2u", - "\u0120chloride", - "\u00c3\u00b8re", - "lei", - "\u0120monte", - "\u0120diff\u00c3\u00a9rentes", - "\u00e0\u00af\u0123.", - "\u0120caregivers", - "\u0120inadequ", - "\u0120farewell", - "\u0120\u00d1\u0124\u00d0\u00b8\u00d0\u00bf\u00d0\u00b0", - "ontec", - "\u0120Eph", - "HHH", - "\u0120Todos", - "\u0120\u00d0\u00a1\u00d0\u00a8\u00d0\u0132", - "\u0120trov", - "\u0120lige", - "\u0120c\u00c3\u00b4ng", - "\u0120Civ", - "\u0120capaz", - "\u0120Vallahi", - "\u0120queste", - "\u0120replica", - "\u00d8\u00b3\u00d8\u00a8", - "zna", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d1\u0125\u00d0\u00b6", - "\u0120PT", - "wave", - "ieni", - "\u0120relied", - "develop", - "\u0120deme", - "\u0120Aman", - "\u0120[...]", - "\u0120compliments", - "uais", - "\u0120\u00ed\u012e\u00a8", - "\u0120smelling", - "\u0120dadurch", - "\u00d9\u012a\u00d8\u00aa", - "\u0120oranges", - "\u0120\u00d0\u00bb\u00d0\u00b0\u00d0\u00b9", - "\u0120stabilization", - "\u00e5\u0122\u012f", - "\u00e3\u0124\u012e\u00e3\u0123\u0141", - "\u00e6\u00a5\u00bd", - "\u0120appliances", - "\u0120hm", - "\u0125\u0132\u00eb\u00a9\u00b4", - "odynamics", - "\u0120ci\u00c4\u013b", - "\u0120Cott", - "MON", - "\u0120Mang", - "\u00e6\u0136\u00af\u00e6\u012e\u0123", - "\u0120allerdings", - "\u00ce\u00b9\u00ce\u00ba\u00ce\u00ae", - "shots", - "\u0120ts", - "\u0120G\u00c3\u00b6r", - "\u0120CHAR", - "\u0120:(", - "\u0120wrath", - "\u0120fique", - "\u0120f\u00c3\u00bchren", - "\u0120testament", - "\u0120^^", - "\u00e1\u00b9\u013d\u00e1\u00b9\u00a3\u00e1\u00b9\u0129a", - "ALD", - "\u0120texto", - "\u0120Dogs", - "\u0120sib", - "\u0120pathetic", - "ocks", - "\u0120radically", - "\u0120MORE", - "\u0120JAMES", - "\u0120ingl", - "\u0120Technical", - "\u0120porch", - "\u0120UT", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u0131\u00d0\u00b7\u00d0\u00b0\u00d1\u0124\u00d0\u00b5\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be", - "\u0120renewal", - "\u0120aesthetics", - "ikum", - "\u0120beverage", - "dern", - "\u0120predictive", - "\u0120chuy", - "\u0120Regarding", - "\u0120Forward", - "\u0120\u00d9\u012a\u00d9\u0126", - "\u0120contextual", - "\u0120dwarf", - "\u0120prehe", - "\u0120governed", - "\u0127\u0126", - "\u0120trabalhar", - "\u0120neg\u00c3\u00b3cio", - "\u0120\u00d0\u00b1\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d1\u012a\u00d0\u00be\u00d0\u00b9", - "\u00d0\u00b5\u00d1\u0129\u00d0\u00b0\u00d1\u0124", - "\u0120\u00d0\u00b4\u00d1\u0125\u00d1\u0127", - "\u0120floods", - "\u0120bowling", - "\u0120OB", - "\u0120H\u00c3\u00a4r", - "\u0120grading", - "\u00ec\u00a3\u00bc\u00eb\u012c\u0136", - "\u0120gars", - "dling", - "\u0120rak", - "\u00eb\u012a", - "creat", - "\u0120\u00d1\u012b\u00d0\u00b5", - "\u0120neighbours", - "food", - "Query", - "\u0120heroin", - "iceps", - "\u0120Kinda", - "NET", - "\u0120mari", - "\u0120imitate", - "\u0120achter", - "\u0120settlements", - "rare", - "cciones", - "\u0120\u00eb\u0135\u013e", - "\u0120fik", - "itung", - "\u0120\u00d0\u00bc\u00d0\u00b0\u00d0\u00ba\u00d1\u0123\u00d0\u00b8\u00d0\u00bc", - "\u0120elf", - "\u0120dalla", - "\u0120Polsce", - "\u0120Pul", - "\u00d0\u00a7\u00d1\u0124\u00d0\u00be", - "\u0120Morgen", - "\u00d8\u0143\u00d9\u0127", - "\u0120supremacy", - "\u0120kys", - "\u0120Hurricane", - "\u0120GTA", - 
"\u0120Feh", - "\u0120finalmente", - "mund", - "\u0120Krie", - "\u00c3\u00a9poque", - "\u0120Tucker", - "ITT", - "\u0120lur", - "\u0120dipping", - "\u00c3\u00a4v", - "\u0120eerste", - "\u0120Flint", - "bildung", - "\u00e0\u00b8\u00b9\u00e0\u00b9\u012b", - "\u0120toim", - "\u0120pracy", - "\u0120transforms", - "\u0120speeding", - "\u0120presenter", - "\u0120fellows", - "filled", - "ieza", - "\u0120advising", - "\u0120Interview", - "\u00d0\u00b8\u00d0\u00b3\u00d1\u0122", - "wehr", - "\u0120Dante", - "pture", - "\u012a\u00eb\u00ac\u00b8", - "\u00af\u00b8\u00eb", - "\u0132\u0132", - "\u0120Counter", - "\u0120crist", - "\u0120\u00ec\u00a7\u013e", - "\u0120jeune", - "\u0120\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00b0\u00d1\u012a", - "\u0120mie\u00c4\u0129", - "\u0120tutor", - "\u0120masala", - "\u0120powdered", - "\u0120nau", - "\u0120Frederick", - "\u0120billing", - "\u0120Eisen", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00b1\u00d1\u0122", - "\u0120mest", - "\u00e6\u00bd", - "\u0120snipp", - "\u0120mono", - "\u0120Alo", - "\u0120Mercy", - "\u00c3\u00a9rience", - "\u0120casualties", - "\u0120ANNOUNCER", - "\u00e4\u00bb\u0130", - "\u0120tocar", - "\u0120bacterial", - "Ho", - "\u0120streak", - "\u0120JENN", - "\u0120plast", - "\u00d1\u0123\u00d0\u00bb\u00d0\u00b5\u00d0\u00b4", - "\u0120reapp", - "\u0120paycheck", - "\u0120miners", - "habt", - "\u0120Jap", - "\u00d0\u00bd\u00d1\u0125\u00d1\u0124", - "\u0120redemption", - "\u0120quir", - "hnlich", - "\u0120accumulation", - "\u0120shove", - "\u0120adrenaline", - "Make", - "\u0120Hern", - "ossing", - "\u0120Vil", - "ubby", - "hertz", - "breaks", - "\u0120spur", - "\u0120Daha", - "USTIN", - "\u0120continuer", - "\u0120Saul", - "\u00e3\u0123\u00ae\u00e3\u0123\u00af", - "\u0120\u00ed\u0131\u0143", - "\u0120\u00eb\u0132\u013a\u00eb\u00a9\u00b4", - "\u0120\u00eb\u00a7\u0132\u00ec\u0136\u0122", - "\u0120\u00d0\u00be\u00d0\u00b6", - "\u0120suspects", - "\u0120laquelle", - "\u0120Muchas", - "\u0120v\u00c3\u00b6llig", - "ulen", - "\u0120impres", - "\u0120lobb", - "enee", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b6", - "Ta", - "\u0120r\u00c3\u00a9alit\u00c3\u00a9", - "\u0120Rex", - "\u0120harvesting", - "\u0120estr", - "\u00e6\u00b6", - "ospace", - "OSS", - "\u0120disturbance", - "assic", - "\u0120Isab", - "\u0120d\u00c3\u00a9couv", - "\u0120Hampshire", - "\u0120ornament", - "\u0120lu\u00c3\u00b4n", - "\u0120UW", - "\u0120j\u00c4\u0127", - "\u00e9\u0124\u00a3\u00e4\u00b9\u012a", - "\u0120respecto", - "\u0120comunidad", - "\u0120comigo", - "agna", - "\u0120intrinsic", - "\u0120Alumni", - "\u0120sesleri", - "\u0120estimation", - "\u00e2\u0122\u0136\u00e2\u0122\u0136", - "\u0120produit", - "\u00e3\u0122\u0124\u00e3\u0122\u012f", - "\u0120\u00d0\u00b2\u00d1\u0122", - "\u0120whirl", - "\u0120acces", - "\u00c3\u00a7u", - "\u0120variability", - "\u0120vodka", - "itsu", - "\u0120internships", - "\u0120allocate", - "RR", - "\u00ed\u013d\u012a", - "\u0120instructional", - "tant", - "\u0120\u00e0\u00ae\u0127\u00e0\u00ae\u00a4", - "\u0120invites", - "\u0120hak", - "\u0120scares", - "\u0120eclipse", - "\u00d0\u00bf\u00d0\u00be\u00d0\u00b2", - "\u00d0\u00ba\u00d0\u00be\u00d0\u00bb\u00d1\u012e", - "ativas", - "\u0120stabbed", - "\u0120DOM", - "\u00e4\u00b8\u012f\u00e5\u012a\u00b0", - "roots", - "\u0120Picture", - "\u00ed\u013a\u00bc", - "\u0120CHA", - "iec", - "\u00c4\u00b1\u00c4\u00b1", - "hanol", - "\u0120misunderstand", - "Ray", - "\u0120roadmap", - "ocumented", - "izione", - "\u0120Olive", - "rift", - "\u0120\u00d7\u0136\u00d7\u0142", - "\u00e6\u00af\u012f", - 
"lest", - ";;", - "\u0120EA", - "\u00e9\u013e\u0122\u00e8\u00a6\u0123", - "\u00d0\u00be\u00d0\u00b4\u00d1\u0125", - "\u0120hobbies", - "\u0120burial", - "\u00e3\u0123\u00ab\u00e3\u0123\u00a1\u00e3\u0123\u00af", - "\u00d0\u00a4", - "lege", - "\u0120HJ", - "\u0120objection", - "\u0120\u00e3\u0123\u0143", - "ctory", - "\u0120incremental", - "\u0120gymn", - "\u0120epidemi", - "\u00d1\u0123\u00d1\u012d\u00d0\u00bb", - "\u00c3\u0133", - "\u0120advancement", - "\u0120parch", - "News", - "\u0120ayr", - "\u00d0\u00bb\u00d0\u00b0\u00d0\u00bc", - "\u0120\u00d7\u013e\u00d7\u00a9", - "\u0120diploma", - "\u00e3\u0123\u00a1\u00e3\u0124\u0125\u00e3\u0124\u0135", - "\u0120robbed", - "Only", - "\u0120incur", - "\u0120chanting", - "\u0120\u00ed\u0137\u00b4\u00eb\u0131\u0126", - "\u0120riches", - "\u0120Carmen", - "\u0120nostro", - "\u00ce\u00bb\u00ce\u0143", - "\u0120Powder", - "\u00e0\u00b9\u0122\u00e0\u00b8\u00ab", - "\u0120\u00ec\u0140\u012a\u00ec\u013e\u00bc\u00eb\u00a9\u00b4", - "\u0120ger\u00c3\u00a7ekten", - "\u0120Pikachu", - "\u00d0\u00b5\u00d0\u00bc\u00d0\u00be\u00d0\u00bd", - "OLL", - "\u0120planetary", - "\u0120slows", - "\u0120clockwise", - "alion", - "\u0120\u00ec\u012e", - "\u0120vern", - "\u0120homme", - "\u0120endpoint", - "\u0120innocence", - "\u0120elementos", - "\u0120sophomore", - "\u0120notions", - "\u0120Couldn", - "pur", - "\u0120zat", - "\u0120obsess", - "\u0120motivo", - "\u0120Kub", - "\u0120Drug", - "Ant", - "\u0120Players", - "\u0120Humans", - "\u0120melee", - "\u0120Wildlife", - "\u0120VP", - "\u0120volcanic", - "\u0120comin", - "\u0120Guang", - "\u0120\u00cf\u0126\u00ce\u00b9\u00cf\u0124", - "\u0120\u00d0\u00be\u00d1\u0123\u00d0\u00be\u00d0\u00b1\u00d0\u00b5\u00d0\u00bd\u00d0\u00bd\u00d0\u00be", - "\u0120Size", - "Listen", - "\u0120Aaa", - "appro", - "\u0120barbar", - "\u0120Parkinson", - "\u00d0\u00bd\u00d1\u0131\u00d1\u0124\u00d1\u012e", - "\u00e5\u012f\u00b0", - "\u0120underestimate", - "\u0120substitution", - "\u0120cosmetic", - "\u00e4\u00b8\u012d\u00e6\u00ac\u00a1", - "\u0120willen", - "\u0120beide", - "anni", - "\u0120conditioned", - "\u0120Debbie", - "\u0120isto", - "\u0120Edwards", - "\u00ec\u013d\u012e\u00ec\u013c\u0136", - "\u0120\u00d1\u0124\u00d0\u00be\u00d0\u00b2", - "\u0120abbrevi", - "\u0120M\u00c3\u00bcn", - "\u0120Princ", - "\u0120Liang", - "\u0120stink", - "\u0120radioactive", - "\u00e3\u0123\u0128\u00e3\u0124\u0131", - "\u0120acontec", - "\u0120uncon", - "\u0120Turbo", - "\u00e3\u0123\u0132", - "\u0120kisses", - "\u00e6\u013a\u00af\u00e4\u00bb\u0122\u00e9\u00ba\u00bc", - "\u00d0\u00b5\u00d1\u0124\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "\u0120frontier", - "\u0120Spy", - "\u0120Belarus", - "\u0120CBS", - "\u00e1\u00bb\u0139", - "amoto", - "\u00ed\u0137\u013e\u00eb\u012f\u00b0", - "\u0120\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00be", - "\u0120Enfin", - "\u0120breadth", - "\u00e9\u013a\u00b2", - "\u0120Cafe", - "\u0120Daf\u00c3\u00bcr", - "\u0120Bour", - "aras", - "\u0120blueprint", - "an\u00c4\u00b1", - "\u0120constants", - "\u0120attacker", - "\u0120Formula", - "za\u00c4\u0129", - "\u0120sowie", - "\u0120eyebrow", - "obook", - "\u0120setzen", - "\u00e7\u00ac\u00ac\u00e4\u00b8\u012b", - "onsider", - "awning", - "\u0120s\u00c3\u00b6yleye", - "\u0120invaded", - "\u0120pronouns", - "\u0120dobry", - "Si", - "\u0120\u00d0\u00a5\u00d0\u00be\u00d1\u0124", - "\u0120volleyball", - "\u0120lament", - "isches", - "arme", - "api", - "\u0120Wiki", - "\u00d0\u00bb\u00d0\u00b8\u00d1\u012a", - "\u0120kasih", - "\u0120pess", - "\u0120\u00d1\u0126\u00d0\u00be\u00d1\u0124", - 
"\u0120Sul", - "\u00e5\u00be\u00b7", - "\u0120pseudo", - "\u0120memo", - "\u0120\u00ec\u0139\u00b0\u00ec\u012c\u00b5", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00bb\u00d0\u00bb\u00d0\u00b0\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d0\u00bc", - "\u0120Reach", - "miral", - "alted", - "\u0120statut", - "reading", - "\u0120s\u00c3\u00b6yled", - "\u0120Lindsey", - "\u0120Ahmad", - "\u00eb\u00b6\u0122\u00eb", - "\u0120\u00d0\u00a1\u00d0\u00b5\u00d0\u00b3\u00d0\u00be\u00d0\u00b4\u00d0\u00bd\u00d1\u0131", - "\u0120przygot", - "\u0120hyster", - "URE", - "\u0120Neigh", - "Reporter", - "\u0120Bunu", - "\u0120Treaty", - "\u0120Rank", - "\u0120Fame", - "inished", - "\u0120geared", - "\u0120compose", - "odia", - "\u0120Lon", - "\u0120jeste\u00c5\u013dmy", - "\u0120DIRECTOR", - "\u0120elkaar", - "\u0120Viel", - "\u00d7\u0132\u00d7\u00a9", - "ynthia", - "\u00e4\u00b8\u00a6", - "\u0120m\u00c3\u00a8re", - "\u0120Tomato", - "\u0120exatamente", - "ni\u00c4\u013b", - "\u0120Frei", - "\u0120Dif", - "\u0120openings", - "\u0120graphical", - "\u0120\u00d1\u0125\u00d0\u00b4\u00d0\u00be\u00d0\u00b1", - "\u0120\u00d0\u00b2\u00d1\u0123\u00d0\u00bf", - "\u0120Weekly", - "\u00d0\u00b5\u00d0\u00b2\u00d0\u00b0", - "\u0120hangs", - "\u0120unsafe", - "\u0120emblem", - "\u0120Kolleginnen", - "alay", - "\u0120ksi", - "\u0120hides", - "\u0120olmay", - "\u0120entste", - "\u0120arthritis", - "\u00c3\u0141erdem", - "\u0120binnen", - "\u0120listens", - "\u0120Hess", - "\u00e5\u0128\u012f\u00e4\u00be\u0128", - "\u0120Louise", - "lden", - "\u00d0\u00b5\u00d0\u00bd\u00d1\u0123", - "\u0120Version", - "\u0120Agriculture", - "\u00ec\u012c\u00a4\u00eb\u00a5\u00bc", - "\u00d0\u00bc\u00d0\u00b0\u00d0\u00bd", - "\u00eb\u0126\u00a4\u00ec\u013c\u0136", - "\u0120wines", - "\u0120INF", - "rul", - "\u0120JK", - "\u00c4\u00b1yorlar", - "shield", - "reath", - "\u0120terus", - "\u0120Lum", - "\u0120anticipation", - "\u0120accustomed", - "\u0120Mina", - "\u0120wield", - "io\u00c3\u00a8", - "mera", - "\u0120countdown", - "\u0120cling", - "\u0120commend", - "\u0120faktiskt", - "\u0120defenses", - "\u0120cockpit", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bc\u00d0\u00b0\u00d0\u00bd\u00d0\u00b4", - "\u0120dishwas", - "\u0120Thanos", - "\u0120kidneys", - "\u0120sehe", - "\u0120microbes", - "\u0120cuff", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d1\u0123\u00d0\u00be\u00d0\u00ba", - "\u0120Spicy", - "\u00e7\u0143\u012b\u00e7\u0143\u012b", - "\u00e0\u00ae\u00b5\u00e0\u00ae\u00b0", - "culus", - "orc", - "\u00e7\u00be\u0127", - "ixes", - "\u0120Credit", - "\u0120raj", - "\u0120bringt", - "\u0120Niss", - "\u0120grim", - "\u0120SOL", - "\u0120tenim", - "\u0120Sudan", - "\u0120Spart", - "\u0120promotes", - "\u0120Nossa", - "\u0120\u00d1\u0123\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00be\u00d1\u0131\u00d0\u00bd\u00d0\u00b8", - "\u0120\u00ec\u00b0\u00a9", - "\u0120uncont", - "\u0120Liberal", - "\u0120\u00d0\u00a2\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00ba\u00d0\u00be", - "\u0120Viele", - "\u0120kt\u00c3\u00b3rej", - "\u0120****", - "Max", - "\u0120\u00d0\u00a7\u00d1\u0124\u00d0\u00be\u00d0\u00b1\u00d1\u012d", - "350", - "\u0120\u00ed\u013a\u00bc\u00ec\u0140\u0132", - "\u0120\u00eb\u00b6\u0126\u00eb\u0135\u00a4\u00ec\u013f\u00b4", - "\u0120warp", - "\u0120tenga", - "\u0120sympathetic", - "\u0120bizi", - "\u0120Zack", - "iedo", - "\u0120\u00eb\u012b\u00b4\u00ec", - "piel", - "\u0120\u00d1\u0124\u00d0\u00be\u00d0\u00bb", - "\u0120scaled", - "\u0120PETER", - "\u0120COMM", - "\u0120Came", - 
"\u0120catastrophe", - "\u0120sweaty", - "igration", - "\u0120stuffing", - "\u0120\u00cf\u0122\u00ce\u00bf\u00ce\u00bb\u00cf\u012f", - "\u0120Driver", - "zyst", - "Tech", - "\u0120assessed", - "\u0120Surface", - "\u00c4\u00b1r\u00c4\u00b1m", - "sur", - "lerweile", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00b3", - "\u0120shutting", - "\u0120fractions", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00bb", - "everyone", - "\u0120ern", - "\u0120\u00d0\u013f\u00d0\u00be\u00d0\u00b2", - "\u0120defenders", - "\u0120versucht", - "\u00e3\u0125\u00b3\u00e3\u0125\u0122", - "\u0120polity", - "\u0120\u00d0\u0141\u00d0\u00be\u00d0\u00bd", - "verst\u00c3\u00a4nd", - "\u0120browsers", - "\u0120transformative", - "\u0120dictate", - "\u0120LEGO", - "\u0120ninguna", - "\u00ea\u00b4\u0133", - "\u0120pizz", - "\u0120Harold", - "\u0120Lopez", - "\u00da\u00be\u00db\u012e", - "an\u00c4\u00b1z", - "atchet", - "\u00d9\u012c\u00d8\u00aa", - "\u0120lernen", - "\u0120\u00ea\u00b7\u0122\u00ec\u0139\u00ac", - "\u0120housed", - "\u0120cleanse", - "\u0120WAT", - "laration", - "\u0120bytes", - "\u0120tucked", - "\u0120faults", - "\u00d0\u00b4\u00d0\u00be", - "FX", - "\u0120\u00ec\u0138\u00bc\u00eb\u00a7\u012a\u00eb\u0124\u013a", - "\u0120deform", - "\u0120contracting", - "\u0120TIME", - "irse", - "\u0120neben", - "\u0120cerc", - "\u0120Armstrong", - "\u0120tester", - "\u0120parfait", - "\u0120jealousy", - "\u0120toxins", - "\u0120disbel", - "\u00d1\u0125\u00d1\u0122\u00d1\u012d", - "impression", - "\u0120prostate", - "\u0120firewall", - "\u0120classics", - "\u00d0\u00b5\u00d1\u0129\u00d1\u012e", - "\u0120socialism", - "\u0120gracious", - "\u0120\u00d1\u0123\u00d0\u00bd\u00d0\u00be\u00d0\u00b2\u00d0\u00b0", - "\u0120\u00d0\u00b4\u00d0\u00bd\u00d1\u0131", - "\u0120burner", - "\u0120Minor", - "\u0120\u00ec\u013c\u00b0\u00eb\u00a6\u00ac\u00eb", - "\u0120jedes", - "\u0120continuum", - "\u0120hots", - "\u0120occurrence", - "\u0120administered", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00bc\u00d0\u00b5\u00d1\u0124", - "\u0120hesitation", - "\u0120drills", - "erca", - "\u0120\u00d0\u00b2\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d0\u00b9", - "\u0120steadily", - "\u0120insanlar", - "\u0120ihan", - "\u00ed\u0133", - "\u0120helper", - "\u0120Senin", - "\u00e5\u0123\u013e", - "\u00d0\u00be\u00d0\u00b2\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b5", - "\u0120ERIC", - "bla", - "\u0120Academic", - "\u0120humanities", - "black", - "umpy", - "ortex", - "\u0120\u00ec\u0142\u012a\u00eb", - "\u0120\u00d8\u00a5\u00d9\u0128", - "\u0120disclose", - "\u0120Elijah", - "\u0120\u00ce\u00bb\u00ce\u0143", - "\u0120Quer", - "\u00d8\u00a8\u00d9\u0126", - "\u00e3\u0124\u00a1", - "Tell", - "arle", - "\u00d1\u0138\u00d1\u0122", - "\u0120augmented", - "\u0120\u00eb\u00b9\u0126\u00ec\u012c\u00b7", - "\u0120android", - "\u00e0\u00a4\u00a4", - "arma", - "\u0120szer", - "geord", - "\u0120geek", - "\u0120yeux", - "\u0120pong", - "\u0120\u00e3\u0123\u013f\u00e3\u0123\u0128", - "\u0120tortured", - "\u0120Bath", - "zig", - "asonable", - "\u0120nets", - "\u0120baru", - "\u0120Flat", - "\u0120Vater", - "\u0120Terror", - "\u0120Avo", - "\u0120ceremonies", - "roe", - "\u00d9\u0123\u00d8\u00b3", - "Ops", - "\u0120hyvin", - "\u0120apresent", - "olor", - "\u0120\u00d0\u00b8\u00d0\u00b3\u00d1\u0122\u00d1\u012d", - "orton", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0140\u00ac", - "\u0120lookin", - "\u0120TY", - "\u0120Mint", - "Add", - "\u0120mite", - "\u0120Smoke", - "\u0120nota", - "\u0120moss", - "\u0120Abend", - "\u0120\u00ec\u00bb\u00a8", - "\u0120exaggerated", - 
"fires", - "\u0120redist", - "ffiti", - "\u0120openness", - "\u00ea\u00b0\u0132\u00ec\u013f\u00b4", - "endeu", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00bd\u00d0\u00be\u00d0\u00b9", - "Watch", - "\u0120avatar", - "\u0120Pey", - "urun", - "\u0120senza", - "\u0120\u00ec\u00a7\u0122\u00ec\u0139\u0143", - "\u0120Natomiast", - "\u0120emergence", - "rays", - "\u0120crafted", - "gary", - "\u00e3\u0123\u0142\u00e3\u0123\u0133", - "\u00c3\u00bcng", - "-\"", - "\u0120hacked", - "\u0120stray", - "encie", - "emo", - "\u0120comen", - "\u0120K\u00c4\u00b1z", - "\u0120Jasmine", - "\u0120Hindi", - "manas", - "\u0120infinitely", - "emon", - "\u00ec\u013f\u00b8\u00eb\u012f\u00b0\u00ec\u013c\u0136", - "jak", - "\u0120roaring", - "\u00c3\u00a9rique", - "sweise", - "\u0120Rolex", - "\u00e5\u0142\u00b1\u00e5\u00b0\u0130", - "\u0120Stuart", - "bnb", - "\u0120diagnose", - "\u0120coherent", - "\u0120MJ", - "\u00e6\u00ba\u0138\u00e5\u0124\u013b", - "\u0120pike", - "lav", - "\u0120orchestral", - "\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d0\u00b8", - "\u0120terminar", - "\u0120gatherings", - "\u0120compliant", - "\u0120upgrading", - "\u0120regulator", - "\u0120lan\u00c3\u00a7", - "\u00e9\u0122\u00a3", - "\u0120merchants", - "tawa", - "\u0120monitored", - "\u0120rendre", - "\u00e4\u00b8\u00a4", - "\u0120unterwegs", - "anguard", - "gard", - "\u0120Below", - "duino", - "\u0120\u00d0\u00a6\u00d0\u00b5", - "\u0120impedance", - "\u00ec\u013e\u00a1", - "\u00e4\u00bb\u00bd", - "\u0120aktuell", - "\u0120Vatic", - "\u00e5\u0143\u00a9", - "\u0120stewards", - "\u0120brightest", - "\u0120kenn", - "\u0120kau", - "\u0120Matrix", - "\u0120Bark", - "\u0120\u00f0\u0141\u0133", - "\u0120taper", - "\u0120casino", - "\u00d7\u00a8\u00d7\u0136", - "ysical", - "\u0120builders", - "\u0120cz\u00c5\u0124owie", - "\u0120Nepal", - "\u0120!\"", - "\u0120terme", - "\u0120innych", - "\u0120maths", - "\u0120drafted", - "\u0120Balk", - "\u0120hesitant", - "\u0120voltar", - "\u0120revive", - "\u0120\u00d1\u0126\u00d0\u00b8\u00d0\u00bb\u00d1\u012e\u00d0\u00bc\u00d0\u00b0", - "\u0120assassin", - "\u0120Solutions", - "\u0120duel", - "\u0120bearings", - "\u00e0\u00b8\u0126\u00e0\u00b8\u00b0", - "\u0120rookie", - "ikat", - "\u0120biscuits", - "\u0120cords", - "\u00d1\u0125\u00d0\u00b2\u00d0\u00b0\u00d1\u0124\u00d0\u00b8", - "ARIN", - "\u0120progressing", - "\u0120Gir", - "\u0120penetrate", - "\u0120Storage", - "eight", - "\u0120\u00d1\u0124\u00d1\u0122\u00d1\u0125", - "\u0120don\u00c3\u0143t", - "\u0120sizin", - "\u0120outdated", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u012a\u00d0\u00b8", - "\u0120affir", - "\u0120spoons", - "\u0120oni", - "\u0120flank", - "\u0120Gol", - "h\u00c3\u00a3", - "\u0120p\u00c3\u00a9ri", - "\u0120honorable", - "\u0120Breathe", - "scenes", - "\u0120obviamente", - "\u00d0\u00b8\u00d0\u00ba\u00d1\u0123", - "\u0120\u00d7\u00a9\u00d7\u0140\u00d7", - "\u0120smoothie", - "\u0140\u012a\u00eb", - "\u0120dime", - "\u0120\u00ed\u0138\u012a\u00ec\u0138\u00b4\u00ec\u013c\u0136", - "\u0120appel", - "\u0120Catholics", - "\u0120singles", - "\u0120laten", - "\u0120\u00c3\u00a7\u00c3\u00bcnk\u00c3\u00bc", - "\u0120Vader", - "\u00e6\u0131\u013d", - "\u0120vard\u00c4\u00b1", - "\u0120Istanbul", - "gr\u00c3\u00a9", - "\u0120Elsa", - "\u00c3\u00abl", - "\u0120invece", - "\u0120crane", - "\u0120obe", - "\u0120Shark", - "\u0120smack", - "\u0120restoring", - ".\\", - "\u0120\u00eb\u00b9\u0142\u00eb", - "\u0120faded", - "umbers", - "Singing", - "\u0120depressing", - "thest", - "\u0120Wahr", - "\u0120multitude", - 
"\u00d1\u0122\u00d0\u00b0\u00d0\u00b2\u00d1\u0123\u00d1\u0124\u00d0\u00b2\u00d1\u0125\u00d0\u00b9\u00d1\u0124\u00d0\u00b5", - "rijk", - "eka", - "\u0120completes", - "\u0120Wells", - "\u0120roy", - "\u0120Pray", - "\u0120Kalau", - "izin", - "ia\u00c5\u0124em", - "\u0120locom", - "\u0120Nashville", - "\u0120Pentagon", - "\u00eb\u00af\u00b8", - "\u0120NEW", - "\u00c4\u0127\u00c4\u0129", - "\u00c3\u0143ss", - "\u0120marrying", - "\u0120feud", - "\u00ed\u013b\u0137", - "\u00e6\u0122\u00a5", - ")!", - "\u0120Operations", - "\u00d1\u0125\u00d1\u0136", - "\u0120moje", - "\u0120instructed", - "\u0120\u00eb\u012a\u0126\u00ea\u00b5\u00ac", - "\u0120\u00d7\u0136\u00d7\u0134", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bc\u00d0\u00be\u00d1\u012b\u00d1\u012e\u00d1\u0130", - "\u0120sabia", - "\u00ec\u0137\u013a\u00ec\u0138\u00b4\u00ec\u013c\u0136", - "plane", - "pri", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bb\u00d0\u00bd\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d1\u012e\u00d1\u0130", - "\u0120Kitty", - "\u0120pr\u00c3\u00b3prio", - "edere", - "\u0120interesante", - "\u0120\u00d0\u00b4\u00d0\u00b5", - "\u0120condensed", - "\u0120avent", - "TOR", - "\u0120greasy", - "ARK", - "orta", - "AJ", - "\u0120disreg", - "\u0120corrections", - "\u0120stero", - "\u0120influenza", - "\u0120desses", - "\u0120ballots", - "\u0120meget", - "\u0120mafia", - "\u0120b\u00c3\u00b6l", - "nost", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "\u0120responder", - "\u0120hinten", - "grav", - "\u00e0\u00b8\u0143\u00e0\u00b8\u00b0", - "ynchron", - "\u0120viens", - "\u0120samo", - "\u0120dt", - "pannt", - "\u0120\u00c5\u013dwiat", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00bf\u00d0\u00b8\u00d1\u0123", - "\u0120merged", - "\u0120kep", - "\u0120misleading", - "\u0120digamos", - "\u0120ammon", - "\u00e8\u00be\u013d", - "chet", - "\u0120\u00ea\u00b0\u0122\u00ec\u0142\u00b8", - "\u0120uni", - "\u0120\u00eb\u0132\u013a\u00eb\u012c\u0136\u00eb\u012f\u00b0", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00bf\u00d1\u0122\u00d0\u00b0\u00d0\u00b2", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d0\u00b3\u00d0\u00be", - "\u0120animate", - "\u00d7\u0137\u00d7\u0132\u00d7", - "\u00d0\u00b5\u00d1\u0122\u00d0\u00b2", - "\u0120minced", - "\u0120kaum", - "\u00e3\u0123\u0124\u00e3\u0123\u0123", - "\u00cf\u0122\u00ce\u00b5", - "\u00d0\u00bb\u00d0\u00b5\u00d0\u00b3", - "existing", - "\u0120plataform", - "\u0120KRIS", - "\u00ec\u013d\u0142", - "\u0120Familien", - "\u0120Libya", - "\u0120biodiversity", - "\u0120idiots", - "irdi", - "\u0120szyb", - "\u0120Rolling", - "\u00c3\u00bccht", - "\u0120\u00d1\u0125\u00d0\u00b4\u00d0\u00b8\u00d0\u00b2", - "\u00d1\u0123\u00d1\u0125\u00d0\u00b4", - "\u0120realizar", - "\u0120canned", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00bd", - "\u0120metabolic", - "\u0120Beef", - "\u0120kilka", - "\u00d0\u00bb\u00d1\u0130\u00d1\u0123", - "\u0120registry", - "\u00d0\u00bc\u00d0\u00be\u00d1\u0124\u00d1\u0122\u00d0\u00b8\u00d1\u0124\u00d0\u00b5", - "\u0120viel\u00c3\u00a4", - "\u0120odc", - "\u0120condemned", - "\u00e6\u00a9\u012d", - "fal", - "\u0120Dil", - "wo\u00c5\u013dci", - "Aw", - "\u0120statistically", - "\u0120sogen", - "\u0120BETH", - "\u0120shaving", - "\u00e5\u00b9\u00b8", - "ocal", - "\u0120Funny", - "\u0120peacefully", - "\u0120addictive", - "\u0120Insert", - "lauf", - "\u0120experiencia", - "\u00e9\u00a6\u0138\u00e5\u0127\u012a", - "\u00d0\u00b8\u00d1\u0124\u00d0\u00b5\u00d0\u00bb\u00d1\u0131", - "\u00c3\u0143gen", - "\u00c3\u00a1gina", - "\u0120abdomen", 
- "\u00ed\u0137\u013e\u00eb\u012d\u00a4", - "icus", - "imana", - "\u00ec\u012f\u00a8", - "arching", - "\u0120konkret", - "\u00ec\u0137\u013a\u00eb", - "\u00d0\u00b5\u00d0\u00ba\u00d0\u00b0", - "oufl", - "ivel", - "\u0120nude", - "\u00c3\u00a8tres", - "\u0120monsieur", - "\u0120clash", - "\u0120therapists", - "\u0120cubed", - "\u0120retrouver", - "\u0120waveform", - "\u0120potem", - "\u0120Former", - "isi\u00c3\u00b3n", - "\u00e5\u00ba\u013e", - "\u0120\u00d7\u0132\u00d7\u013f", - "undos", - "\u0120Meinung", - "\u00d8\u00b5\u00d9\u0126", - "\u0120Jude", - "\u0120n\u00c3\u00a5r", - "\u0120Leonardo", - "\u0120Cristo", - "\u0120GOT", - "\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d1\u0125\u00d0\u00ba", - "LAN", - "\u0120g\u00c3\u00a5ng", - "\u0120d\u00c3\u00a9b", - "\u0120Frankfurt", - "\u0120crappy", - "\u0120lil", - "ann\u00c3\u00a9e", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d1\u0123\u00d1\u0124\u00d0\u00b5", - "RET", - "\u0120Ner", - "\u0120COSTA", - "\u0120jedem", - "\u0120curtains", - "\u0120iterations", - "\u0120unav", - "\u0120plaque", - "orum", - "\u0120\u00ce\u00b6", - "\u0120n\u00c3\u00bameros", - "\u0120desap", - "\u00b2\u00bd", - "\u0120compiled", - "\u0120refle", - "\u0120rankings", - "\u0120repaired", - "\u0120\u00d0\u013f\u00d0\u00b0\u00d0\u00bf\u00d1\u0122", - "\u0120downloads", - "\u0120armour", - "\u0120\u00d7\u013b\u00d7\u0137\u00d7\u00aa\u00d7\u00a8", - "\u0120longevity", - "\u0120TONER", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bc\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd\u00d1\u0124\u00d0\u00b0\u00d1\u0122", - "\u0120czego", - "\u0120notify", - "\u0120airports", - "\u0120enduring", - "lette", - "\u0120apparat", - "\u0120habil", - "\u00e1\u00bb\u0129c", - "nad", - "ICO", - "\u0120Brah", - "\u0120seg\u00c3\u00ban", - "\u0120governors", - "kaha", - "\u0120Schluss", - "\u0120odpowied", - "irting", - "\u0120rempl", - "\u0120Aboriginal", - "identally", - "\u0120enhancing", - "licting", - "\u0120Hawaiian", - "\u0120striving", - "\u0120Niet", - "\u0120znaczy", - "\u0120obedience", - "\u0120n\u00c3\u00a5got", - "\u0120expired", - "\u01201918", - "presented", - "\u0120prowad", - "\u0120Terr", - "\u0120Princeton", - "\u0120morgen", - "\u0120attracting", - "\u0120Sigma", - "igner", - "\u0120Rechts", - "\u0120Peki", - "\u0120methy", - "\u0120hamm", - "\u0120direito", - "\u0120delegation", - "\u00d0\u00b8\u00d0\u00b2\u00d0\u00b0\u00d1\u0130\u00d1\u0124", - "\u0120gin", - "Young", - "\u0120dependencies", - "\u0120Bradley", - "buds", - "\u0120fis", - "\u0120pytanie", - "\u0120interconnected", - "\u0120embaixo", - "\u0120Sas", - "\u0120ruh", - "\u0120Sicht", - "Sur", - "\u0120superb", - "\u0120Sabbath", - "\u0120Danger", - "kol", - "\u0120hou", - "supp", - "\u0120Nacional", - "\u0120succession", - "\u0120v\u00c3\u00a1", - "\u0120Ma\u00c3\u0141nahmen", - "\u0120Jessie", - "\u0120Idaho", - "forest", - "\u0127\u013a", - "\u0120\u00d7\u0140\u00d7\u0135", - "\u0120\u00d8\u00a3\u00d9\u012c", - "\u0120sweetheart", - "\u0120neatly", - "\u0120Evangel", - "\u00ea\u00b3\u00a1", - "\u0120Suite", - "\u00c3\u00bablica", - "\u0120\u00d1\u0125\u00d0\u00bb\u00d0\u00b8", - "\u0120Announcer", - "ligh", - "\u0120sensations", - "\u0120shelters", - "\u0120hart", - "\u0120squeezing", - "\u0120Rivers", - "\u0120Cooking", - "\u00ec\u00b1\u0127", - "personal", - "\u0120manos", - "\u00d1\u0133\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "wij", - "\u0120gogg", - "\u0120Milli", - "\u0120FP", - "\u00c3\u00bcnst", - "\u0120LS", - "\u0120spraying", - "\u0120faux", - "\u0120autograph", - "ologic", - "\u0120torment", - 
"\u0120encrypted", - "\u00e1\u00bb\u0127", - "\u0120estre", - "\u00e7\u00b9\u00bc", - "\u00e0\u00b1", - "\u0120stumbled", - "\u0120aider", - "\u0120saben", - "xter", - "\u0120Cities", - "\u0120T\u00c3\u00bcrk", - "\u00eb\u012d\u00a5", - "chine", - "\u0120topping", - "\u0120poisoned", - "\u0120Romania", - "\u00d7\u0135\u00d7\u013b", - "\u0122\u00eb\u00a1\u013e", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0122\u00d1\u0131\u00d0\u00b4", - "\u0120chirping", - "\u0120\u00ec\u013b\u0126\u00eb", - "\u00d7\u0133\u00d7\u00a2", - "\u0120cuanto", - "\u0120donating", - "\u0120Regent", - "\u0120Beruf", - "\u0120distracting", - "\u0120stamina", - "\u0120Darren", - "\u0120\u00ec\u00b6\u0137", - "lists", - "dal", - "chuss", - "\u0120economist", - "\u00e3\u0123\u012a\u00e3\u0125\u00bc", - "orgt", - "\u0120istiyorum", - "\u00e8\u00bf\u013d", - "\u0120Surprise", - "\u0120Hao", - "\u0120\u00ec\u00b5\u013e\u00ea\u00b3\u0142", - "\u0120GW", - "\u0120Inner", - "\u0120quieren", - "\u0120minded", - "\u0120supercomputer", - "\u0120diagrams", - "\u00ed\u012c\u013e\u00eb", - "\u00ea\u00b2\u0142\u00ec\u0138\u00b4", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u012c\u00d1\u0131\u00d1\u0123", - "\u0120estaban", - "\u0120destroys", - "\u0120Breaking", - "\u0120kar\u00c4\u00b1\u00c5\u0141", - "\u0120rebuilding", - "\u013e\u00eb\u012e\u0122", - "\u00d0\u00bb\u00d0\u00b8\u00d0\u00b2\u00d0\u00be", - "\u0120Sauce", - "\u0120Fusion", - "\u00d7\u0137\u00d7\u0140\u00d7", - "\u0120Quinn", - "\u0120gauche", - "\u0120\u00d9\u012a\u00d8\u00a3", - "\u0120\u00c8", - "\u00e7\u0135\u013e", - "\u0120techno", - "\u0120dispatch", - "\u0120a\u00c5\u0141k", - "\u0120einzel", - "\u0120Gmail", - "\u00e7\u0140", - "\u0120\u00ea\u00b0\u013e\u00ec\u013f\u00b8", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d0\u00bc\u00d1\u012e", - "\u0120journeys", - "\u0120iht", - "\u0120fibre", - "\u0120dramas", - "ouched", - "\u0120rename", - "\u0120\u00d0\u00be\u00d0\u00bf\u00d0\u00b5\u00d1\u0122", - "\u0120poo", - "\u0120Dru", - "\u0120\u00d0\u00b8\u00d1\u0124\u00d0\u00be\u00d0\u00b3", - "\u0120zast", - "\u0120coz", - "\u0120zucch", - "\u0120obtaining", - "\u0120commute", - "\u0120submer", - "\u0120Vish", - "\u0120Rabb", - "ogg", - "\u0120hut", - "\u00ed\u0138\u012a\u00ec\u0138\u00b4", - "\u00e6\u00af\u0136\u00e5\u00a6\u0124", - "eremi", - "\u0120\u00ce\u00bc\u00ce\u00b1", - "\u0120diskut", - "\u0120\u00d0\u00b1\u00d1\u0125\u00d0\u00ba", - "\u0120impaired", - "depend", - "\u0120\u00d9\u012a\u00d8\u00a7", - "\u0120\u00d1\u0122\u00d1\u0125\u00d0\u00ba", - "\u0120\u00d0\u00b1\u00d0\u00b0\u00d1\u0122", - "\u0120oxidation", - "\u0120situa\u00c3\u00a7\u00c3\u00a3o", - "\u00c9\u013bn", - "u\u00c3\u00a7\u00c3\u00a3o", - "\u0120sagte", - "\u0120SER", - "\u0120Cake", - "\u0120turmeric", - "\u0120Kak", - "bung", - "\u0120K\u00e1\u00b9\u013d\u00e1\u00b9\u00a3\u00e1\u00b9\u0129a", - "\u0120poisoning", - "\u0120slipping", - "\u0120Says", - "\u00e5\u00b0\u00b1\u00e5\u0131\u00af\u00e4\u00bb\u00a5", - "\u00c3\u00b2ng", - "\u00e7\u0141\u00b3", - "\u00c2\u00ab", - "\u0120Claudia", - "\u0120Character", - "\u00d0\u00bd\u00d0\u00b8\u00d1\u0128", - "coat", - "\u0120progressed", - "\u0120Fergus", - "\u0120\u00ec\u013a\u00a4\u00eb\u012c", - "\u0120oat", - "ordable", - "\u0120Ley", - "\u0120Heraus", - "\u0120resultados", - "\u0120Kayla", - "\u0120riff", - "\u0120chegou", - "\u0120xi", - "\u0120spacious", - "\u0120recognised", - "\u0120ech", - "\u0120Tie", - "\u0120launcher", - "Jim", - "\u0120suppression", - "\u0120Impossible", - "\u0120guitars", - "\u0120Fourier", - 
"\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d1\u0123\u00d0\u00ba\u00d0\u00b8\u00d0\u00b9", - "\u0120Therap", - "\u0120Kaf", - "centered", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00be\u00d1\u0124\u00d0\u00b2\u00d0\u00b5\u00d1\u0124", - "\u0120klim", - "\u0120carbohydrates", - "ignant", - "\u0120Astron", - "\u0120emple", - "\u0120drastic", - "\u0120\u00d0\u00bc\u00d0\u00b8\u00d1\u0122\u00d0\u00b5", - "\u00d0\u00b2\u00d0\u00b8\u00d0\u00bd", - "uw", - "\u0120prettier", - "\u0120donuts", - "\u0120Athena", - "\u0120dissert", - "\u0120plante", - "\u0120uranium", - "\u00ec\u013f\u012e\u00eb", - "ar\u00c3\u00a9", - "\u0120rzecz", - "\u0120displaying", - "\u00e6\u012a\u00b2", - "\u0120sarc", - "r\u00c3\u00a3o", - "\u0120tampoco", - "\u0120philosophers", - "\u0120Recht", - "\u00e6\u0135\u013c", - "\u0120comentarios", - "yse", - "\u0120\u00ec\u013e\u00a4", - "\u0120mise", - "\u0120Gin", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d0\u00bc", - "\u0120FROM", - "liner", - "atif", - "\u0120spo\u00c5\u0124ec", - "xa", - "\u0120\u00d1\u0124\u00d1\u0122\u00d1\u0125\u00d0\u00b4", - "\u0120wag", - "\u00ea\u00b8\u00b0\u00ec\u0139\u0132", - "\u0120MG", - "\u0120offspring", - "\u0120Understanding", - "\u00e5\u0131\u00aa\u00e6\u013a\u00af", - "ORA", - "\u0120whirring", - "\u0120surrend", - "\u0120poker", - "\u0120monuments", - "\u0120\u00e2\u013b\u00a9", - "\u0120organised", - "\u0120Sozial", - "\u0120Factory", - "\u00d1\u0127\u00d0\u00b0", - "\u0120resemble", - "\u00d0\u00b7\u00d0\u00b4", - "\u0120explosions", - "\u0120payroll", - "\u0120omn", - "\u0120Jorge", - "\u00ce\u00b9\u00cf\u0125", - "\u0120fracture", - "\u0120persecution", - "\u0120demais", - "ECH", - ",)", - "\u0120criar", - "\u0120JOSH", - "\u0120demographics", - "\u01201600", - "\u0120currencies", - "\u0120Tips", - "\u0120\u00e9\u0122\u013b\u00e5\u0122\u012d", - "\u0120Refer", - "\u0120Dancing", - "\u0120inconsistent", - "\u0120deh", - "\u0120immens", - "\u0120meist", - "\u0120impatient", - "\u0120behaves", - "\u00e6\u013f\u00be", - "\u0120\u00eb\u0124\u00b4\u00ec\u013c\u00a9", - "\u0120backstory", - "\u0120agreeing", - "\u0120\u00c5\u0123", - "ihin", - "\u0120temperatura", - "\u0120Background", - "\u0120nutzen", - "\u0120\u00eb\u0127\u00b9", - "\u0120M\u00c3\u00a4nner", - "\u0120collaborations", - "\u0120Kos", - "\u00e9\u0123\u0130\u00e5\u0130\u00bb", - "\u0120nightmares", - "\u00eb\u0135\u00b1", - "\u0120Queensland", - "\u0120associates", - "\u0120Kok", - "\u0120factorial", - "\u0120Hyung", - "\u0120\u00ea\u00b7\u00b8\u00eb\u012d\u00a4\u00ec\u013f\u012e", - "\u0120filho", - "\u0120el\u00c3\u00a9t", - "\u0120\u00ed\u0138\u012b\u00eb\u00b3\u00b5", - "\u00b0\u00b1", - "\u0120gefunden", - "\u0120semicondu", - "\u0120counselors", - "\u0120Upper", - "\u0120Aub", - "ickers", - "Ver", - "\u0120northwest", - "\u0120Maintenant", - "\u0120Lakes", - "\u00d0\u00b0\u00d1\u0131\u00d0\u00b2", - "int\u00c3\u00a9", - "\u00ec\u00b0\u00bd", - "\u0120\u00d0\u00b3\u00d0\u00b0\u00d0\u00b7", - "\u0120giorn", - "\u0120digitally", - "\u0120Circuit", - "\u00ec\u00bc\u0122", - "\u00e3\u0124\u012c\u00e3\u0123\u00be\u00e3\u0123\u0139\u00e3\u0123\u0141", - "\u0120cheerful", - "\u0120Peterson", - "\u0120Danish", - "ativos", - "\u0120liken", - "\u0120harbor", - "\u00d0\u00b0\u00d0\u00bb\u00d0\u00b8\u00d1\u0123\u00d1\u0124", - "xe", - "\u0120curls", - "\u0120Rhod", - "End", - "\u0120ET", - "\u0120acquaint", - "\u0120Kelvin", - "\u0120trif", - "\u0120Away", - "\u00ec\u0140\u0132\u00eb\u012c\u0136", - "vs", - "\u0120p\u00c3\u00a1gina", - "\u0120inlet", - "\u0120Santos", - 
"\u0120\u00ec\u013c\u00b0\u00ec\u013b\u0122", - "\u0120yap\u00c4\u00b1yorsun", - "theme", - "\u0120souff", - "\u0120injected", - "\u0120p\u00c3\u00b3\u00c5\u00baniej", - "iverso", - "amped", - "\u0120daher", - "\u0120dagger", - "\u0120\u00d0\u00bb\u00d1\u0130\u00d0\u00b1\u00d0\u00b8\u00d0\u00bc", - "\u0120tummy", - "\u0120enlightened", - "cents", - "\u0120Dah", - "\u0120cuest", - "\u00e4\u00be\u0128\u00e8\u00aa\u00aa", - "ILY", - "\u0120\u00d7\u0133\u00d7\u00a8", - "\u0120banging", - "\u0120Emil", - "\u0120Cler", - "\u0120Border", - "\u00d0\u00b8\u00d0\u00b6\u00d1\u0125", - "\u0120presenters", - "\u0120STUD", - "coins", - "\u0120\u00ed\u013b\u012f", - "\u0120perks", - "\u0120parap", - "\u0120certaines", - "\u0120Lore", - "\u00c3\u00b6st", - "\u0120MARTIN", - "\u0120bios", - "\u0120whereby", - "verts", - "\u0120Miranda", - "\u0120stip", - "\u00e6\u00be\u00a4", - "andez", - "\u00d7\u013d\u00d7\u013e", - "ujin", - "\u0120\u00ea\u00be", - "\u0120allergies", - "plate", - "\u0120yap\u00c4\u00b1l", - "\u0120undertake", - "\u0120\u00eb\u0124\u013a\u00ea\u00b0\u0122", - "Part", - "\u0120k\u00c4\u00b1z\u00c4\u00b1m", - "hguru", - "\u00e3\u0123\u0124\u00e3\u0123\u00a8", - "\u0120Johns", - "\u0120eyelashes", - "\u0120drained", - "\u0120st\u00c3\u00a5r", - "\u00e3\u0123\u0124\u00e3\u0124\u012c\u00e3\u0123\u00be\u00e3\u0123\u013b", - "\u0120Jade", - "\u0120calend", - "film", - "\u0120mesa", - "\u0120ludzie", - "\u0120attracts", - "\u0120juices", - "\u0120\u00d0\u00ba\u00d0\u00b8\u00d0\u00bb", - "\u0120nieuwe", - "\u0120mencion", - "\u0120ignition", - "\u0120bladder", - "andaag", - "\u0120Extension", - "\u00ed\u0124\u00a8", - "feed", - "\u0120\u00d9\u012a\u00d9\u0129", - "\u0120spun", - "\u0120t\u00c3\u00a4t", - "\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d1\u0124", - "tyard", - "ronics", - "\u0120Huge", - "\u00d1\u0125\u00d0\u00b6\u00d0\u00b4", - "string", - "\u0120unjust", - "\u0120prawn", - "\u0120frosting", - "\u0120disappearance", - "iosa", - "\u0120cardi", - "\u0120Priest", - "\u0120cient\u00c3\u0143fic", - "\u00e5\u0135\u00aa\u00e8\u00a3\u00a1", - "\u0120\u00d0\u0134\u00d0\u00b0\u00d1\u0123", - "\u0120\u00eb\u00b6\u0122\u00ed\u0125\u0123", - "\u0120thieves", - "\u0120physique", - "\u0120Eugene", - "\u0120\u00d0\u00b1\u00d0\u00bb\u00d0\u00b8\u00d0\u00b7", - "\u0120monopoly", - "\u0120biography", - "\u0120ho\u00c5\u0141", - "\u0120t\u00c3\u00b6", - "mac", - "\u0120shocks", - "\u00ec\u0126\u00b8\u00eb", - "hit", - "\u0120snug", - "\u0120incl", - "\u0120dedic", - "\u0120ultras", - "\u0120\u00d0\u00b8\u00d0\u00b7\u00d0\u00b2\u00d0\u00b5\u00d1\u0123\u00d1\u0124", - "\u0120utilization", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b2\u00d0\u00b5\u00d1\u0122\u00d1\u012a\u00d0\u00b5\u00d0\u00bd\u00d0\u00bd\u00d0\u00be", - "\u0120servi", - "stag", - "180", - "\u0120sewer", - "\u0120Choice", - "\u0120discharged", - "\u0120JD", - "\u00d0\u00be\u00d0\u00bb\u00d0\u00b5\u00d1\u0124", - "\u0120\u00d0\u00ba\u00d0\u00b2\u00d0\u00b0\u00d1\u0122\u00d1\u0124\u00d0\u00b8", - "\u0120telescop", - "\u0120Je\u00c5\u013dli", - "\u0120Nana", - "cale", - "\u0120\u00d1\u0124\u00d0\u00be\u00d0\u00bd", - "mmm", - "\u00e4\u00ba\u0128\u00e5\u0132\u00a7", - "\u0120gehabt", - "\u00eb\u0124\u0142", - "\u00e6\u012c\u0137", - "\u00e0\u00b8\u013b\u00e0\u00b8\u013b", - "\u0120ether", - "\u0120zen", - "\u0120researched", - "\u0120Czyli", - "\u00e5\u00ae\u012e\u00e5\u0127\u00a8", - "workers", - "\u0120\u00ea\u00b2\u00bd\u00ec\u00b0\u00b0", - "\u0120sheriff", - "allo", - "\u0120tipos", - "\u0120prosecution", - "\u0120frogs", - "\u0120falt", - 
"jd", - "\u0120\u00ed\u012e\u0136", - "\u0120filtered", - "\u0120Oft", - "\u0120\u00ec\u012f", - "\u0120disfr", - "\u0120Mustang", - "\u0120woah", - "\u0120REALLY", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b3\u00d0\u00bb\u00d0\u00b8", - "\u0120entrada", - "\u0120\u00d0\u00b8\u00d0\u00b3\u00d1\u0122\u00d0\u00b0", - "\u0120mixes", - "\u0120\u00d0\u00b0\u00d0\u00b2\u00d1\u0124\u00d0\u00be\u00d0\u00bc\u00d0\u00be\u00d0\u00b1", - "\u00d0\u013b", - "\u0120shin", - "\u0120paranormal", - "\u0120someplace", - "\u0120dishon", - "etaan", - "\u0120fuerte", - "\u00d9\u00b9", - "\u0120doom", - "\u00ec\u012a\u013e", - "\u0120existential", - "\u0120buld", - "\u0120SDK", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b0\u00d0\u00b2\u00d0\u00b4\u00d0\u00b0", - "\u0120turnover", - "\u0120\u00ec\u0139\u00ac\u00ea\u00b8\u00b0\u00ec\u0139\u0132", - "\u0120\u00e0\u00a4\u00b9", - "\u0120modeled", - "\u0120bug\u00c3\u00bcn", - "\u0120experimentation", - "\u0120mornings", - "\u0120medo", - "Stevie", - "\u0120playable", - "\u0120airlines", - "gments", - "\u0120\u00ea\u00b8\u00b0\u00eb\u00b6\u0126", - "\u0120Tomb", - "\u0120MVP", - "AUDIENCE", - "\u0120checkout", - "\u0120passt", - "\u0120beispiel", - "\u0120Links", - "heavy", - "\u0120questionable", - "\u0120\u00ec\u0135\u00b0\u00eb", - "\u0120sill", - "\u0120manipulated", - "\u0120Loren", - "\u0120\u00ec\u013e\u00bc", - "\u0120verge", - "\u00c3\u00a1k", - "IES", - "\u0120sabot", - "\u0120Customer", - "ale\u00c5\u00bcy", - "\u0120nominee", - "\u0120Gad", - "\u0120nouvelles", - "\u0120SPE", - "istling", - "\u0120oval", - "\u00d0\u00be\u00d0\u00b1\u00d1\u0122\u00d0\u00b0\u00d0\u00b6", - "ifty", - "\u00e9\u0129\u0130", - "\u0120bezel", - "yet", - "\u0120freight", - "\u0120Han\u00c4\u00b1m", - "r\u00c3\u0143a", - "\u0120zoning", - "\u0120indem", - "\u0120B\u00c3\u00bc", - "\u0120feminism", - "\u0120voix", - "\u0120oficial", - "\u0120diyorum", - "\u00bb\u0132", - "\u0120arose", - "\u0120parar", - "\u00ec\u013f\u00b8\u00ec\u00a7\u0122", - "\u0120Martine", - "\u0120Lect", - "\u0120rester", - "\u0120drowning", - "uya", - "cida", - "\u0120Ariel", - "\u012002", - "\u0120\u00d7\u0136\u00d7\u0136", - "\u00e7\u00b4\u0142", - "\u0120Wert", - "\u00d0\u00a2\u00d1\u012d", - "\u0120widow", - "\u0120parchment", - "\u0120cottage", - "\u0120XL", - "\u0120Slack", - "\u0120NES", - "\u0120robe", - "\u0120gimm", - "\u0120caminho", - "\u0120Harper", - "\u0120citrus", - "\u0120firefighters", - "\u0120dopamine", - "elets", - "\u0120democrat", - "\u00ec\u0142\u013e\u00eb\u00a1\u013e", - "\u0120playback", - "oj", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00ba", - "\u0120Sullivan", - "semble", - "\u0120Worth", - "\u0120Mustafa", - "\u00e0\u00b8\u00b2\u00e0\u00b8\u00a3", - "\u0120mets", - "\u00e9\u0138\u0122", - "\u00d0\u00bb\u00d0\u00be\u00d1\u0123\u00d1\u012e", - "\u0120inertia", - "\u0120uniforms", - "\u00e8\u00b6\u00b3", - "\u00c3\u00a9rio", - "\u00d7\u0137\u00d7\u00a8\u00d7\u0136", - "\u00c3\u00a9nt", - "\u0120\u00e0\u00ae\u0134", - "\u0120\u00d1\u0123\u00d0\u00b0\u00d0\u00bc\u00d1\u012d\u00d1\u0127", - "\u0120voulais", - "\u0120Zimmer", - "\u00ea\u00b2\u0142\u00eb", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d1\u0123", - "encias", - "\u0120relaci\u00c3\u00b3n", - "\u0120\u00ea\u00b1\u00b8\u00eb", - "\u0120faction", - "\u0120gosp", - "\u00d0\u00bf\u00d0\u00be\u00d0\u00bb\u00d0\u00be\u00d0\u00b6", - "nap", - "hak", - "\u0120proceedings", - "\u0120\u00ec\u0128\u0136", - "\u00ec\u0137\u0126\u00eb\u012d\u012a", - "\u0120\u00ec\u0140\u0132\u00ea\u00b8\u00b0", - "\u0120werd", - "\u0120sof", - 
"\u0120schlim", - "\u0120flavored", - "\u0120quadratic", - "\u0120Boot", - "\u0120publicity", - "\u0120Caro", - "\u0120?\"", - "\u00d0\u00bd\u00d0\u00b8\u00d1\u0128\u00d0\u00b0", - "mania", - "\u0120SUR", - "\u0120BUR", - "lance", - "\u00c3\u00a9tica", - "\u0120zobaczy", - "\u0120trio", - "sama", - "\u0120ta\u00c5\u0141", - "\u0120asymm", - "resser", - "\u0120\u00d8\u00aa\u00d8\u00b9", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0123", - "\u0120beginnings", - "lad\u00c4\u00b1m", - "\u0120\u00d0\u00b1\u00d1\u012d\u00d1\u0123\u00d1\u0124\u00d1\u0122", - "\u0120moo", - "\u0120Geneva", - "\u0120\u00e5\u013e\u00a8", - "erus", - "borah", - "\u0120refusing", - "bull", - "\u0120Waiting", - "\u0120Individual", - "\u0120anonym", - "imens", - "\u0120medidas", - "\u0120fragrant", - "\u0120directement", - "\u0120\u00ec\u0137\u0126\u00eb\u00a7\u012a", - "uria", - "\u0120spherical", - "\u0120abge", - "\u0120Victorian", - "\u0120spectacle", - "\u0120Rodriguez", - "\u0120ocup", - "\u0120N\u00c3\u00a4r", - "marks", - "ngulo", - "\u0120Luci", - "\u0120shouted", - "\u0120regulators", - "\u00c4\u0141ini", - "\u0120disent", - "\u0120\u00d1\u0122\u00d1\u012d\u00d0\u00bd", - "\u00eb\u0124\u00a8", - "\u0120\u00ec\u0124\u00b4\u00eb", - "\u0120probl\u00c3\u00a8mes", - "\u0120Finger", - "assemble", - "\u0120pear", - "\u0120droite", - "\u0120Everywhere", - "tam", - "\u00d0\u00be\u00d1\u0124\u00d0\u00b8\u00d0\u00b2", - "\u00d0\u00b2\u00d0\u00be\u00d0\u00b9", - "ordinate", - "\u0120Lak", - "\u0120m\u00e1\u00bb\u013di", - "\u0120Television", - "\u0120exponentially", - "avas", - "\u0120blev", - "\u0120MT", - "\u00e4\u00bf\u00ba", - "Connell", - "\u0120\u00ea\u00b5\u0143\u00eb\u00af\u00bc", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d0\u00be\u00d0\u00b8\u00d0\u00bc", - "\u0120acha", - "\u0120Dynasty", - "Jin", - "\u0120tore", - "\u0120flor", - "\u0120\u00d0\u00bc\u00d0\u00bd\u00d0\u00be\u00d0\u00b3\u00d0\u00b8\u00d0\u00b5", - "\u00e6\u00b2\u0134\u00e4\u00ba\u012d", - "owan", - "bah", - "\u0120\u00ec\u00a3\u0126", - "\u0120Cela", - "\u0120\u00ec\u00b5\u013e\u00ea\u00b7\u00bc", - "\u0120permettre", - "\u0120abras", - "\u0120verstehen", - "\u0120escort", - "\u0120Them", - "\u00c3\u00a4rke", - "porter", - "\u0120kahkaha", - "\u0120hect", - "\u0120dau", - "wah", - "olve", - "\u0120Ages", - "schaft", - "\u0120Stell", - "nelle", - "\u0120Ensuite", - "\u0120\u00d0\u0134\u00d1\u0123\u00d0\u00b5\u00d0\u00bc", - "\u0120cr\u00c3\u00a9d", - "\u0120PP", - "lords", - "grunting", - "\u0120contraction", - "Got", - "\u0120acquiring", - "\u0120sopr", - "\u0120poisonous", - "RNA", - "\u0120anar", - "\u0120Hof", - "')", - "\u0120remarkably", - "\u0120internacional", - "\u00c3\u00bccke", - "inqu", - "\u0120duy", - "\u0120beasts", - "\u0120LAN", - "\u0120precedent", - "\u0120RPM", - "\u00e5\u0133\u00a8", - "\u0120selon", - "\u0120morte", - "\u0120come\u00c3\u00a7ou", - "\u00d1\u0131\u00d0\u00bb\u00d0\u00b0", - "\u0120interpreting", - "\u0120Burke", - "\u00d1\u0124\u00d1\u0122\u00d0\u00b0", - "\u0120\u00ec\u013f\u00b4\u00eb\u0141\u00ac", - "\u0120pessim", - "\u0120Nok", - "\u00ed\u012e\u013f", - "Female", - "\u0120\u00ec\u012d\u00a4\u00ed", - "\u013b\u0122", - "\u0120stimulation", - "\u0120slick", - "\u0120\u00ea\u00b0\u0122\u00eb\u012c\u0136", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00b7", - "\u0120HBO", - "\u0120papier", - "\u0120k\u00c3\u00b6nnten", - "\u00d1\u0125\u00d0\u00b1\u00d0\u00bb\u00d0\u00b8", - "\u0120Constant", - "SPEAKING", - "\u0120kt\u00c3\u00b3r\u00c4\u0127", - "\u0120cosmetics", - "\u0120Trend", - "\u0120robbery", - "\u0120titt", - 
"\u0120gjort", - "\u0120dietary", - "\u0142\u012e", - "\u0120Kirby", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bc\u00d0\u00b5\u00d1\u0122\u00d0\u00bd\u00d0\u00be", - "\u0120qualification", - "\u0120\u00ec\u0137\u012b", - "\u0120cabinets", - "\u0120http", - "\u0120Erica", - "\u00e7\u00be\u00a9", - "\u0120disadvantages", - "\u0120chattering", - "yz", - "feit", - "\u0120guild", - "\u0120ETF", - "\u0120Dragons", - "\u0120HERE", - "venth", - "\u00d9\u0126\u00d8\u00a7\u00d9\u0127", - "\u0120march\u00c3\u00a9", - "Dam", - "\u0120photon", - "\u0120estable", - "Mag", - "\u0120olhar", - "\u0120coupling", - "\u0120Hilfe", - "\u0120Wizard", - "\u0120\u00d0\u00bc\u00d0\u00b0\u00d0\u00bb\u00d0\u00be", - "help", - "\u0120l\u00c3\u0143nea", - "\u0120\u00ec\u00ab", - "\u0120standalone", - "\u0120morale", - "\u0120zweite", - "\u00e3\u0124\u012a\u00e3\u0124\u012f\u00e3\u0123\u0139\u00e3\u0123\u0131", - "\u00c3\u00a4hrt", - "\u0120dotted", - "\u0120dripping", - "\u0120Flag", - "\u00e9\u013f\u0134", - "rocket", - "rategy", - "irim", - "\u0120\u00ed\u0137\u013a\u00eb\u00a9\u00b4\u00ec\u0126\u013e", - "\u0120sogenan", - "\u0120Uno", - "\u0120Schutz", - "\u0120estilo", - "\u0120Subs", - "\u0120Daisy", - "\u00d0\u013f\u00d0\u00b5\u00d1\u0124", - "'...", - "\u0120platinum", - "\u0120birl", - "\u0120Sovi", - "\u0120violate", - "\u00d1\u0125\u00d0\u00b5\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "rill", - "\u0120traz", - "\u0120snip", - "\u0120cumpl", - "\u00e0\u00b8\u0143\u00e0\u00b8\u0123", - "\u0120cuk", - "\u00e9\u0127\u0134", - "\u0120Parlament", - "\u0120hypert", - "\u0120pulp", - "\u0120tongues", - "atto", - "\u0120busca", - "ihn", - "ERO", - "\u0120\u00d9\u012c\u00d8\u00b9", - "\u0120varias", - "\u0120Marian", - "\u0120bounded", - "\u0120pitching", - "\u0120deficiency", - "\u0120Blessed", - "\u0120Exerc", - "uchs", - "\u0120nh\u00c6\u00b0ng", - "\u00e6\u013e\u00ac\u00e5\u00bd\u0135", - "\u0120raped", - "hales", - "\u0120mala", - "pic", - "\u0120401", - "\u00c5\u013dniej", - "arina", - "\u00eb\u0135\u00a4\u00ec\u013f\u0126", - "otti", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00bb\u00d0\u00b3\u00d0\u00be", - "\u0120tracker", - "\u0120Shelby", - "\u0120vanished", - "\u0120bakery", - "Kap\u00c4\u00b1", - "Jesus", - "\u0120KR", - "JO", - "\u0127\u00b8", - "\u0120discs", - "\u00ec\u0126\u00af", - "\u00ec\u00a7\u0122\u00eb", - "\u00d7\u013b\u00d7\u00a6", - "emary", - "Kendra", - "\u0120y\u00c3\u00bck", - "\u00c3\u00bcckt", - "\u0120vaz", - "\u0120kup", - "aktu", - "\u0120\u00d1\u0123\u00d0\u00bf\u00d0\u00b0\u00d1\u0123\u00d0\u00b8\u00d0\u00b1\u00d0\u00be", - "\u0120aik", - "\u0120nursery", - "\u0120endangered", - "\u00c3\u00aamement", - "ematics", - "\u0120responders", - "\u0120Representatives", - "\u0120sculptures", - "igkeiten", - "\u0120depl", - "\u0120interpretations", - "\u0120deadlines", - "\u01201942", - "\u00c3\u0139", - "\u0120sugars", - "emu", - "lively", - "\u0120recreational", - "\u0120distort", - "\u0120underscore", - "\u0120unquote", - "\u0120safest", - "\u0120swollen", - "\u0120analyses", - "\u0120commenc\u00c3\u00a9", - "\u00e5\u00a6\u00b9", - "andin", - "\u0120\u00d0\u00a5\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d1\u012a\u00d0\u00be", - "\u0120diarr", - "\u00e3\u0123\u00be\u00e3\u0123\u0123", - "ziest", - "\u0120toothbrush", - "\u00e9\u0142\u00bb\u00e9\u0123\u0135", - "uations", - "\u0120cade", - "\u0120backlash", - "hind", - "\u0120risque", - "zess", - "\u0120\u00ec\u013f\u00b4\u00ec\u0137\u00bc\u00ea\u00b8\u00b0", - "\u0120esperar", - "\u0120translations", - "ioned", - "groans", - 
"\u0120\u00d0\u00bf\u00d1\u0125\u00d1\u0124", - "\u0120genetically", - "\u00e9\u0122\u0142", - "\u0120happiest", - "\u0120werk", - "atoon", - "\u0120musi", - "\u0120fun\u00c3\u00a7\u00c3\u00a3o", - "\u0120\u00ec\u0140\u0127\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b9", - "\u0120bevor", - "BLANK", - "\u0120repentance", - "Put", - "\u0120potrzeb", - "\u0120sala", - "\u0120campa", - "WER", - "\u0120dec\u00c3\u0143a", - "\u0120s\u00c3\u00a9curit\u00c3\u00a9", - "\u0120Appreciate", - "\u00d1\u0129\u00d0\u00b8", - "\u0120Random", - "\u00eb\u00b3\u0126", - "kah", - "\u0120m\u00c3\u00b6j", - "\u0120s\u00c3\u00a4ger", - "\u0120\u00d7\u013b\u00d7\u013d\u00d7\u0137\u00d7\u013e", - "\u0120190", - "xtures", - "Eu", - "\u0120g\u00c3\u00a4", - "\u0120\u00d7\u0133\u00d7\u00aa", - "\u0120Croat", - "apo", - "PLE", - "\u0120persistence", - "\u00e5\u012c\u00a9", - "\u0120blends", - "\u0120treffen", - "\u0120Santiago", - "ydia", - "aldo", - "\u0120TensorFlow", - "\u0120Dual", - "\u00e3\u0125\u013e", - "\u0120chiff", - "\u00ec\u0139\u00b4", - "\u0120contracted", - "\u0120segreg", - "\u0120Fairy", - "\u0120wisely", - "\u0120vulnerabilities", - "\u0120handheld", - "\u0120gadgets", - "\u0120bo\u00c5\u0141", - "\u0120Popular", - "\u0120curvature", - "\u00eb\u00ac\u00b8", - "\u0120MARY", - "\u00ec\u013f\u00b4\u00ec\u012c", - "\u0120formulation", - "\u0120celery", - "\u0120blurry", - "\u0120TS", - "alez", - "\u0120ws", - "\u0120programm", - "\u0120Stack", - "\u0120JIM", - "\u00d0\u00be\u00d0\u00b2\u00d0\u00b0\u00d0\u00bb\u00d0\u00b8", - "\u00c4\u00b1ll", - "\u0120p\u00c3\u00a8re", - "\u0120Kanye", - "\u0120Delaware", - "\u0120\u00e3\u0123\u0142", - "\u0120daunting", - "\u0120\u00d0\u00b1\u00d0\u00b5\u00d1\u0123", - "\u0120Stupid", - "big", - "fficial", - "\u0120precipitation", - "\u0120plung", - "\u00e1\u00bb\u00a5c", - "burse", - "\u0120darle", - "\u0120cripp", - "\u0120pioneer", - "\u0120disput", - "\u0120sean", - "\u00e3\u0123\u0135\u00e3\u0124\u0135\u00e3\u0123\u00aa", - "\u0120resistor", - "\u0120allein", - "ipples", - "arel", - "\u0120endors", - "zust", - "\u0120\u00d1\u0122\u00d0\u00b5\u00d0\u00b1\u00d1\u0131\u00d1\u0124\u00d0\u00b0", - "eded", - "\u0120\u00ec\u00b9\u00b4\u00eb\u00a9\u0136\u00eb", - "\u0120lleva", - "\u0120kennt", - "\u0120\u00d0\u00b1\u00d0\u00b0\u00d0\u00bb", - "\u0120Document", - "\u0120Knights", - "\u0120buckle", - "\u0120\u00ec\u012b\u00ac", - "\u0120alk", - "\u0120Everyday", - "atters", - "\u0120toilets", - "\u0120jugar", - "\u0120\u00ec\u0140\u012a\u00ec\u00a7\u0122", - "\u0120genauso", - "\u0120Landesregierung", - "\u00e3\u0123\u00a3\u00e3\u0123\u00b1", - "ije", - "\u0120trailers", - "\u0120Tigers", - "\u0120gitti", - "\u0120forgiving", - "\u0120concurrent", - "\u0120Vu", - "\u0120\u00ed\u012c\u00b9\u00ed\u0140\u012a", - "\u0120BROWN", - "ounded", - "\";", - "\u0120tremb", - "\u0120tiet", - "\u0120\u00d1\u0122\u00d0\u00b5\u00d0\u00b6\u00d0\u00b8\u00d0\u00bc", - "\u0120nutshell", - "\u00d0\u00b5\u00d0\u00bb\u00d0\u00b8\u00d1\u0129", - "\u0120losers", - "ricting", - "\u0120redeem", - "defined", - "Nice", - "\u0120broadband", - "KO", - "\u0120teasing", - "\u0120partisan", - "\u00c4\u00b1ma", - "\u0120\u00ec\u0140\u00ac\u00eb\u00af\u00b8", - "\u0120Journey", - "\u0120slopes", - "uning", - "grunts", - "\u0120t\u00c3\u00a4ll", - "\u0120uncovered", - "\u0120my\u00c5\u013dl\u00c4\u013b", - "\u0120Esther", - "\u00e4\u00ba\u0130", - "\u0120Healthy", - "\u0120\u00eb\u00b0\u0133", - "r\u00c3\u00a9e", - "\u0120polarization", - "\u0120flav", - "\u0120cambiar", 
- "\u0120yr", - "\u0120Ranch", - "\u0120splits", - "\u0120trouv\u00c3\u00a9", - "\u00e5\u013e\u012d\u00e5\u00ae\u00b6", - "\u0120recorder", - "\u0120d\u00c3\u00a9part", - "\u00d9\u012a\u00d8\u00a8", - "\u0120Kry", - "\u0120interessant", - "\u0120ederim", - "\u00c5\u013dwiad", - "ilateral", - "wright", - "\u0120pourra", - "\u00c3\u00aater", - "\u0120camel", - "\u00e1\u0140", - "\u0120rapidement", - "\u0120mej", - "\u0120stiffness", - "ADAS", - "\u0120differs", - "\u0120alot", - "\u0120Sig", - "\u00d1\u0131\u00d1\u0124\u00d0\u00b5\u00d0\u00bb\u00d1\u012e", - "\u0120abstraction", - "\u00e5\u013e\u013a", - "\u0120keiner", - "grupp", - "\u0120Sherlock", - "\u00ed\u013a\u0136", - "\u0120cite", - "\u0120overflow", - "\u0120t\u00e1\u00ba\u00a1i", - "\u00c3\u00bacar", - "bula", - "\u0120conjunto", - "\u0120CI", - "\u0120moderator", - "\u0120indirectly", - "\u0120alleine", - "\u00e2\u0124", - "\u00d1\u012a\u00d0\u00b8\u00d0\u00b1", - "\u0120\u00d0\u00b1\u00d0\u00b0\u00d0\u00b1", - "\u0120danach", - "\u01201939", - "\u0120promet", - "\u0120destinations", - "\u0120Illust", - "\u00ce\u00b9\u00ce\u00ba\u00cf\u012e", - "\u0120sabes", - "\u0120heh", - "\u0120Gesetzent", - "\u0120Miz", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00ba\u00d0\u00be", - "\u0120Mys", - "\u00d0\u00ac", - "\u0120Judaism", - "\u0120mustache", - "\u0120stimmt", - "\u0120Gaza", - "\u0120volte", - "\u0120nuo", - "\u0120m\u00c3\u00b3n", - "\u0120Comput", - "\u00e0\u00b8\u00b9\u00e0\u00b9\u012a", - "\u0120Radi", - "\u0120exceptionally", - "\u0120assumes", - "\u00e9\u0138\u012d\u00e5\u00bf\u0125", - "\u00e3\u0123\u012a\u00e3\u0123\u00b0", - "inform", - "\u0120shrine", - "\u00e6\u0135\u012c", - "\u0120implication", - "\u0120Fitz", - "\u00e6\u00b2\u0134\u00e9\u0139\u013e\u00e4\u00bf\u0124", - "!.", - "\u0120lt", - "\u0120alloy", - "\u0120ethic", - "\u0120monastery", - "\u00ec\u012d\u013e\u00ec\u00a3\u0142", - "ica\u00c3\u00a7\u00c3\u00a3o", - "\u0120coordinating", - "\u0120Moto", - "\u0120overlook", - "\u0120chois", - "\u0120antibiotic", - "\u0120Minne", - "\u0120BJ", - "\u0120Apa", - "orian", - "\u0120spilled", - "Jam", - "\u0120husbands", - "\u0120creations", - "\u0120a\u00c3\u00b1", - "\u00c3\u00bcssel", - "\u0120\u00ec\u013f\u00b4\u00ec\u013c\u00a9", - "\u0120analyse", - "rose", - "\u0120punched", - "\u0120presque", - "\u0120astronomy", - "\u0120schwierig", - "\u0120Ebola", - "\u0120cis", - "\u0120acet", - "\u0120FX", - "endre", - "\u0120\u00ec\u013f\u012e\u00ec\u0137\u0127", - "\u0120webpage", - "\u0120freaked", - "\u0120latte", - "\u0120\u00ec\u00bf\u0142", - "\u0120\u00eb\u00a8\u00b8\u00eb", - "Never", - "Gra", - "\u00ed\u013b\u0136\u00eb\u00a5\u00bc", - "eyed", - "\u0120\u00eb\u00b0\u013e\u00eb\u013f\u00bc", - "\u0120espera", - "\u0120aparece", - "ra\u00c3\u00a7\u00c3\u00a3o", - "\u0120disruptive", - "\u0120Joint", - "urous", - "reas", - "\u0120quer\u00c3\u0143a", - "\u0120distributions", - "\u0120exponent", - "\u00ec\u00b9\u013a\u00eb\u00a5\u00bc", - "\u0120dl", - "zhou", - "\u0120Hearing", - "\u00e5\u00b7\u00ae\u00e4\u00b8\u012f\u00e5\u00a4\u013c", - "\u0120Craw", - "\u0120floats", - "ounced", - "Lab", - "World", - "\u0120burdens", - "\u0120authoritarian", - "\u0120Bolt", - "\u0120\u00d0\u00be\u00d0\u00b4\u00d0\u00bd\u00d1\u0125", - "\u0120pigeon", - "\u0120distractions", - "\u0120Herausforder", - "\u0120zest", - "esc", - "\u0120shakes", - "atas", - "\u0120\u00d9\u0127\u00d8\u00b4", - "holes", - "\u0120thinkers", - "alta", - "\u0120arche", - "\u0120Suk", - "anha", - "\u0120tempting", - "\u0120youtuber", - "\u0120v\u00c3\u00ac", - 
"\u0120dzia\u00c5\u0124a", - "\u0120Vatican", - "Park", - "\u0120supers", - "\u0120Nikki", - "\u00eb\u012c\u0132\u00eb", - "orang", - "ramient", - "\u00e9\u00ac\u00bc", - "\u0120\u00ea\u00b0\u0138\u00ea\u00b3\u0142", - "\u0120desserts", - "\u0120avere", - "\u0120Gregory", - "\u0120\u00eb\u0135\u00a4\u00ec\u0138\u00b4\u00ec\u013a", - "\u0120costing", - "\u0120Clinic", - "\u0120rebels", - "\u0120Mob", - "\u0120bunlar", - "\u0120Yours", - "ertime", - "\u0120retali", - "mara", - "atus", - "alles", - "\u0120\u00d0\u00b4\u00d1\u0122", - "\u0120\u00d0\u00b4\u00d0\u00b8\u00d1\u0123", - "\u0120discounts", - "\u0120GUY", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00ba\u00d0\u00be\u00d0\u00b5", - "\u0120Experiment", - "rement", - "\u0120Xiang", - "\u0120bate", - "WE", - "\u0120specialize", - "\u0120deity", - "\u0120Loki", - "mag", - "\u0120Nit", - "West", - "\u0120maternal", - "\u0120quis", - "\u00e5\u0141\u00ba\u00e6\u013e\u00ac", - "broken", - "\u0120lasers", - "\u0120hakk", - "\u0120Angels", - "\u0120mastery", - "antis", - "Tiffany", - "eee", - "\u00e7\u0133", - "orem", - "\u0120inacc", - "\u0120jurisdictions", - "\u0120Kardash", - "\u00e6\u013e\u00ba", - "Il", - "\u0120Sinn", - "\u00e5\u012d\u0137\u00e7\u0136\u00bb", - "\u0120athletics", - "c\u00c4\u013b", - "\u0120loosely", - "\u0120dieta", - "Ag", - "\u0120??", - "\u0120\u00eb\u012e\u0122\u00ed\u0133\u013e", - "\u0120superv", - "\u0120nutrit", - "\u0120drifting", - "\u0120\u00ec\u0126\u0142\u00ec\u0125\u013f\u00eb\u012d\u013a", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bd\u00d1\u0131\u00d0\u00bb", - "\u0120Victory", - "\u00d9\u0126\u00d8\u00a9", - "\u00d7\u0137\u00d7\u0142\u00d7\u0136", - "\u0120\u00d0\u00bf\u00d0\u00b8\u00d1\u012a", - "\u0120shaved", - "\u0120mesure", - "onden", - "\u00d9\u0125\u00d8\u00b1", - "\u0120exile", - "\u0120Desde", - "\u0120Pinterest", - "\u0120attachments", - "\u0120hombres", - "\u0120fines", - "\u0120\u00ec\u0126\u00b8\u00ec\u0125\u0123", - "\u0120sleeps", - "\u0120Taco", - "\u0120IRA", - "rios", - "\u0120oll", - "etes", - "\u0120unut", - "fashioned", - "\u0120treball", - "\u0120Nearly", - "\u0120\u00d1\u0122\u00d0\u00b5\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be", - "\u0120chil", - "\u00e9\u0122\u00b1", - "\u00c4\u0141a", - "\u0120MEL", - "roscop", - "\u0120CG", - "\u0120venge", - "\u0120dishwasher", - "algic", - "\u0120modifier", - "\u0120embassy", - "timer", - "emics", - "\u0120intricate", - "\u0120evet", - "\u0120\u00eb\u012e\u0122\u00eb\u00b0\u0137", - "\u0120isot", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0125\u00d1\u0129", - "\u0120Quiz", - "reso", - "\u00ce\u00b4\u00cf\u0130", - "\u0120yelled", - "\u0120feder", - "ELLER", - "\u0120exceeded", - "onas", - "icano", - "\u0120\u00d0\u00b6\u00d0\u00b8\u00d0\u00b2\u00d0\u00be\u00d1\u0124", - "\u0120Mao", - "\u0120Kazuto", - "\u0120\u00e3\u0127\u012d\u00e3\u0127\u012d\u00e3\u0127\u012d\u00e3\u0127\u012d", - "\u0120frontline", - "\u0120Hungarian", - "\u0120\u00c3\u00bcberall", - "awat", - "\u0120grips", - "i\u00c3\u00a7\u00c3\u00b5es", - "arnya", - "\u0120\u00cd\u00a1", - "\u0120seid", - "\u0120anak", - "\u0120acabou", - "\u00ed\u0137\u0133", - "\u0120notorious", - "\u0120Godzilla", - "\u0120overcoming", - "\u0120Pend", - "\u0120olabilir", - "\u00c3\u00bclme", - "\u0120erhalten", - "\u00e3\u0124\u012b\u00e3\u0123\u0126", - "\u00ea\u00b7\u00b9", - "\u0120Meter", - "\u0120staan", - "Ol", - "\u0120chats", - "\u0120Buenos", - "\u00c3\u0143ve", - "aluable", - "\u0120strategically", - "\u0120comprised", - 
"\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d1\u0123\u00d0\u00be\u00d0\u00bd\u00d0\u00b0\u00d0\u00b6", - "\u0120wann", - "\u0120Cen", - "\u00d0\u00bd\u00d0\u00b8\u00d1\u0124\u00d0\u00b5", - "\u0141\u0123", - "\u0120\u00d1\u0124\u00d0\u00be\u00d0\u00b1\u00d0\u00be\u00d0\u00b9", - "iad", - "\u0120karde\u00c5\u0141im", - "\u0120Congressman", - "reaming", - "homme", - "\u0120communaut", - "\u0120alcoholic", - "\u0120pickled", - "\u0120acord", - "position", - "eg\u00c3\u00b3l", - "\u0120troubling", - "\u0120Marcheg", - "\u0120zumindest", - "\u0120seamlessly", - "\u0120olun", - "\u0120TVs", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b0\u00d0\u00ba\u00d1\u0124\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d1\u0123\u00d0\u00ba\u00d0\u00b8", - "\u0120backend", - "\u00e3\u0123\u0135\u00e3\u0124\u0135\u00e3\u0123\u00ab\u00e3\u0123\u00a1\u00e3\u0123\u00af", - "idable", - "\u0120gadget", - "\u0120fa\u00c3\u00a7o", - "\u0120Marchegiani", - "\u0120\u00eb\u00b0\u00a4", - "\u0120accidental", - "\u0120LP", - "\u0120eldest", - "\u0120Admiral", - "\u0120n\u00c4\u0125m", - "lever", - "\u0120pastel", - "\u0120fondo", - "Connie", - "\u0120tercer", - "\u0120pact", - "\u0120Monte", - "\u0120meats", - "\u0120SMS", - "\u0120Australians", - "\u00e7\u00bc", - "Rhett", - "\u0120exactement", - "\u0120\u00eb\u00b9\u00bc", - "\u0120MOD", - "\u00e7\u00a1", - "\u0120Rapt", - "\u0120Noch", - "\u0120abort", - "\u0120Naval", - "\u0120Fuji", - "INTER", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d0\u00b2\u00d1\u012d\u00d0\u00b9", - "\u0120miejsce", - "\u0120ICU", - "\u0120Graduate", - "\u0120Glen", - "ardi", - "\u0120\u00c8\u013a", - "\u0120solder", - "\u0120professions", - "\u0120orthog", - "omn", - "introdu", - "\u0120Denise", - "\u00ec\u0140\u0132\u00eb\u00a5\u00bc", - "\u0120correspondence", - "AMA", - "\u0120inflict", - "\u0120fand", - "\u0120G\u00c3\u00bc", - "\u0120\u00d1\u0129\u00d0\u00b5\u00d1\u0124", - "\u0120traced", - "\u0120patents", - "\u0120ambush", - "\u0120lotta", - "ffer", - "\u0120Wagner", - "\u0120imperson", - "\u0120extr\u00c3\u00aamement", - "\u00d9\u0124\u00d8\u00aa", - "conduct", - "Att", - "\u0120Mueller", - "\u0120Alicia", - "\u0120cyc", - "\u0120hacker", - "\u0120tys", - "\u0120hail", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d1\u0131\u00d0\u00b2", - "\u0120passo", - "\u0120\u00ec\u00b6\u0136\u00ea\u00b0\u0122", - "\u0120\u00ce\u012a", - "\u0120packaged", - "\u0120Cynthia", - "heet", - "\u00e4\u00b8\u0143\u00e5\u013d\u00bd", - "\u0120Nissan", - "\u0120Questo", - "\u00e9\u00a8", - "did", - "\u0120\u00ce\u00bc\u00ce\u00b9\u00ce\u00b1", - "\u0120Ellis", - "\u0120Analysis", - "cemos", - "\u0120aseg", - "\u0120Myster", - "\u0120Cao", - "\u0120tuv", - "\u0120Industry", - "\u00ec\u00a3\u00bc\u00ea\u00b3\u0142", - "otal", - "\u0120peque\u00c3\u00b1o", - "bras", - "\u0120comprehend", - "\u0120Simpson", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b2\u00d0\u00b8\u00d0\u00b5", - "ocracy", - "\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d1\u0123\u00d0\u00ba\u00d0\u00b8", - "\u0120Mush", - "\u0120Laurie", - "\u0120triangular", - "\u0120Presents", - "\u0120Kunden", - "\u00e7\u00b4\u00b9", - "\u00e6\u0143\u00a6", - "\u0120Iss", - "\u0120Deck", - "\u00e1\u00bb\u0125n", - "\u0120Darkness", - "\u0120inflammatory", - "eremiah", - "\u0120warmed", - "veyard", - "\u0120Memory", - "etty", - "\u0120taxpayers", - "\u00e0\u00b8\u0135", - "\u00d8\u00a1", - "\u0120practise", - "\u00eb\u012d\u00ac\u00eb", - "\u0120drilled", - "m\u00c3\u00bc\u00c5\u0141", - "logo", - "\u0120Fach", - "\u00a4\u00eb\u00a1\u013e", - "\u0120\u00c3\u00bcbrigens", - "\u0120konnten", 
- "\u0120normalmente", - "\u0120argues", - "ilingual", - "\u00b0\u00eb\u00a5\u00bc", - "egal", - "\u0120travaill", - "ovy", - "\u00d0\u00b0\u00d1\u0124\u00d0\u00be", - "\u0120ruth", - "\u0120Lights", - "\u0120consisted", - "\u00d7\u0133\u00d7\u00a8\u00d7\u013b\u00d7\u013f", - "\u0120stereotype", - "\u0120payer", - "\u0120Ree", - "\u0120Airbnb", - "\u0120drowned", - "\u0120Zoe", - "\u0120canopy", - "\u0120barr", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d1\u0129", - "\u0120pagan", - "\u0120jars", - "\u0120r\u00c3\u00aa", - "erver", - "\u00e6\u012a\u00bf", - "ieben", - "\u0120espect", - "\u0120Fi", - "\u0120unwilling", - "\u0120technician", - "\u00e1\u00ba\u00b7t", - "member", - "\u0120Canal", - "\u00d8\u00b3\u00d9\u0127", - "\u0120lieber", - "\u0120inference", - "\u0120honoring", - "\u00e5\u0133\u00b5", - "\u0120Campaign", - "\u0120lineage", - "\u0120Stress", - "\u0120victories", - "\u0120deja", - "\u00d7\u00a3", - "\u00c3\u00aates", - "blick", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d0\u00bd\u00d0\u00b5\u00d0\u00b5", - "oths", - "\u0120Couple", - "Jason", - "\u0120Nicolas", - "\u00d0\u00b5\u00d0\u00ba\u00d1\u0123", - "lib", - "\u0120herramient", - "\u0120\u00d7\u0132\u00d7\u0137\u00d7\u0140\u00d7\u00a8", - "\u0120\u00d0\u00b2\u00d0\u00b8\u00d0\u00b4\u00d0\u00b8\u00d0\u00bc", - "millimeter", - "\u0120silhouette", - "\u0120driveway", - "\u0120cherish", - "\u00e3\u0127\u0142\u00e3\u0127\u0142", - "\u0120ransom", - "\u0120interdisciplinary", - "\u0120Portal", - "\u0120trag", - "thood", - "\u0120tedious", - "\u0120glossy", - "\u0120pr\u00c3\u00a9par", - "\u0120Cay", - "\u0120Took", - "\u0120Bottom", - "\u0120zig", - "\u00e5\u00ab", - "\u00e5\u012f\u00b1", - "represented", - "\u00e0\u00b9\u0122\u00e0\u00b8\u00a5\u00e0\u00b8\u00a2", - "\u0120desarrollo", - "\u00ec\u0126\u013e\u00eb", - "\u0120viscos", - "\u0120milligram", - "\u0120Gund", - "\u0120ferment", - "drum", - "\u0120drawers", - "Laugh", - "\u0120pelos", - "\u0120pavement", - "\u0120memoir", - "avait", - "\u01202050", - "\u00a4\u00eb\u00a5\u00bc", - "\u0120raz\u00c3\u00b3n", - "\u0120flourish", - "\u0120stern", - "\u00e4\u00b8\u012a", - "\u0120Chung", - "\u0120serpent", - "\u0120Gentlemen", - "\u00e7\u013e\u0141\u00e7\u013c\u0126\u00e5\u00be\u012a", - "kook", - "\u0120lut", - "importe", - "parent", - "\u0120wsz", - "\u0120scree", - "\u0120Mitarbeiter", - "\u00e5\u00b7\u00b4", - "mut", - "\u0120\u00ec\u0138\u013a\u00ea\u00b8\u00b0\u00eb\u00a5\u00bc", - "\u0120semble", - "\u0120OW", - "\u0120investigator", - "\u0120Cheryl", - "\u0120Gerald", - "\u0120prere", - "\u0120compares", - "nyt", - "\u0120diferen\u00c3\u00a7a", - "?-", - "\u0120qu\u00c3\u00a1", - "\u00d7\u00a8\u00d7\u013b", - "Sen", - "\u0120heps", - "\u0120gratuit", - "\u0120consort", - "\u0120STOP", - "\u0120Protestant", - "\u0120electrode", - "\u00e2\u0139", - "\u0120securely", - "\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d1\u0123\u00d0\u00ba\u00d0\u00be\u00d0\u00b9", - "\u0120t\u00c3\u00a4\u00c3\u00a4", - "\u0120registers", - "\u0120Heavenly", - "ogly", - "iss\u00c3\u00a4", - "\u0120Physics", - "\u0120Merkel", - "\u0120r\u00c3\u00a9v", - "\u00e9\u013b\u00a2", - "\u0120erased", - "\u0120Sacramento", - "\u0120coffin", - "\u0120exacer", - "\u0120lanz", - "\u0120poets", - "ulif", - "\u0120\u00ec\u00b9\u013a\u00eb", - "\u0120Nerd", - "\u0120NCT", - "\u0120Hour", - "nehmer", - "\u0140\u013a\u00eb\u0131\u0126", - "\u0120Princi", - "Sw", - "mies", - "armed", - "\u0120Beatles", - "\u0120propagation", - "\u0120exchanged", - "\u0120cumulative", - "\u0120\u00ec\u00a7\u0133\u00ec\u0139\u0132", - 
"\u0120defeating", - "\u00e6\u012c\u00b1", - "bels", - "\u0120wes", - "\u0120Odyssey", - "\u00e4\u00bd\u0142\u00e6\u0125\u00b3", - "avior", - "\u0120\u00ec\u013e\u0126\u00ec\u0139\u0132", - "\u0120brit", - "\u0120hijo", - "DAY", - "\u0120\u00d8\u00a7\u00d9\u0126\u00d8\u00aa\u00d9\u012c", - "\u0120\u00d0\u00a1\u00d0\u00b5\u00d1\u0122\u00d0\u00b3", - "\u00d1\u0125\u00d0\u00ba\u00d0\u00b0", - "edsi\u00c4\u013b", - "\u0120impos", - "\u0120ellas", - "\u0120firearms", - "\u0120NR", - "\u0120\u00d7\u0133\u00d7\u0132", - "\u0120\u00d0\u0141\u00d0\u00be\u00d0\u00ba\u00d0\u00b0", - "awi", - "\u0120\u00ec\u0126\u00b1\u00ea\u00b3\u00b5", - "\u0120pupils", - "\u0120Tack", - "\u0120frase", - "\u0120Ship", - "\u0120stad", - "\u00e4\u00b8\u013e", - "\u0120Greater", - "unun", - "immung", - "grown", - "\u0120NXT", - "\u0120Americas", - "fox", - "\u0120manten", - "\u00e9\u0142\u0132\u00e5\u0124\u013b", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00ba", - "\u0120rikt", - "lectric", - "deep", - "\u0120\u00d0\u00b7\u00d0\u00bd\u00d0\u00b0\u00d0\u00b5\u00d1\u012a\u00d1\u012e", - "\u0120benut", - "\u0120Infrast", - "\u0120Emir", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00bf\u00d1\u0122\u00d0\u00b0\u00d0\u00b2", - "\u0120Kimchi", - "\u0120Finnish", - "\u00b4\u00ec\u0142\u0123", - "inaire", - "\u0120oike", - "\u00e6\u00b8\u0127\u00e6\u00a5\u013c", - "\u0120hostage", - "\u0120Button", - "\u00d9\u0124\u00d9\u012c", - "eking", - "\u0120Kazakh", - "\u0120comforting", - "\u0120sog", - "\u0120greeted", - "guitar", - "payer", - "\u0120relational", - "\u0120construir", - "\u00e7\u012b\u00b9\u00e5\u012a\u00a5", - "opian", - "\u0120Volume", - "ieth", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b2\u00d0\u00be\u00d0\u00bc", - "urrection", - "li\u00c5\u013dmy", - "\u0120hemisphere", - "\u0120Bean", - "IGN", - "\u0120k\u00c3\u00b6t\u00c3\u00bc", - "\u0120Fallout", - "\u0120brace", - "\u00e7\u00b9\u00bc\u00e7\u00ba\u012e", - "\u00cf\u0122\u00ce\u00ac", - "\u0120HAS", - "\u0120g\u00c3\u00a9", - "\u0120characterize", - "\u00e1\u00ba\u00b7c", - "\u0120Milky", - "\u0120tumors", - "\u0120nuit", - "\u0120Gaz", - "\u0120\u00ec\u0140\u012a\u00eb\u012d\u00a4\u00eb\u012c\u0136", - "\u0120\u00d0\u00b3\u00d0\u00b0\u00d1\u0122", - "essment", - "\u0120Abe", - "\u0120\u00eb\u00bd\u0133", - "\u0120Einsatz", - "JIN", - "j\u00c3\u00a4", - "Cry", - "\u0120Promised", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d1\u0122\u00d0\u00b4", - "okus", - "\u0120scalable", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0123\u00d0\u00bc\u00d0\u00be\u00d1\u0124\u00d1\u0122\u00d0\u00b5\u00d1\u0124\u00d1\u012e", - "\u00c3\u00bccklich", - "\u0120realism", - "\u0120mayo", - "\u0120juvenile", - "\u0120headlights", - "\u0120g\u00c3\u00b6r\u00c3\u00bc\u00c5\u0141", - "\u0120Reform", - "\u0120halves", - "czne", - "\u0120breakup", - "\u00c5\u00bcej", - "\u0120r\u00c3\u00a4tt", - "Day", - "\u0120\u00ec\u013f\u00bc\u00eb\u00b3\u00b8", - "\u0120muerte", - "\u0120tunes", - "\u0120Smile", - "record", - "\u0120recherche", - "atisfied", - "\u0120pozi", - "\u0120celebrations", - "isexual", - "\u0120ROB", - "thirds", - "\u0120Fortune", - "\u0120\u00d1\u0124\u00d0\u00be\u00d0\u00b9", - "\u0120branded", - "loo", - "\u0120dud", - "\u0120randomized", - "\u0120combin", - "\u00e4\u00b8\u0122\u00e4\u00ba\u013d", - "ieran", - "czenia", - "\u012f\u00e3\u0125\u00ab", - "\u0120curator", - "\u0120artery", - "\u0120\u00d1\u0125\u00d1\u012a", - "\u0120\u00d1\u0129\u00d0\u00b8\u00d1\u0124", - "\u0120subsidies", - "\u0120blossom", - "\u0120Twilight", - "\u0120hyv\u00c3\u00a4", - "\u0120Pompe", - "\u0120Cisco", - 
"\u0120\u00d0\u0141\u00d1\u0122\u00d0\u00be", - "\u0120biri", - "\u0120gern", - "\u0120rebuilt", - "\u0120wcze", - "\u0120benefici", - "\u0120drummer", - "\u0120solids", - "\u0120diyorsun", - "\u00e3\u0123\u0124\u00e3\u0124\u012c\u00e3\u0123\u012e\u00e3\u0123\u00a8\u00e3\u0123\u0128\u00e3\u0123\u0136\u00e3\u0123\u0138\u00e3\u0123\u0126\u00e3\u0123\u00be\u00e3\u0123\u0139\u00e3\u0123\u0141", - "lated", - "\u0120muddy", - "\u0120holog", - "\u0120claps", - "\u0120Rings", - "\u0120Okey", - "\u0120Brave", - "\u0120valuation", - "\u0120migrant", - "\u0120intermitt", - "\u0120eigene", - "iliary", - "\u00e3\u0125\u00bc\u00e3\u0125\u012a", - "markt", - "kr", - "\u0120Rib", - "\u00e1\u00bb\u013bi", - "\u0120accusations", - "\u0120arab", - "wash", - "\u0120Bardzo", - "\u0120ugh", - "esters", - "ophren", - "\u0120alimentos", - "\u0120Uz", - "\u00d6\u0124", - "\u0120650", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00b5\u00d1\u0127", - "FI", - "\u0120sampai", - "\u0120parl\u00c3\u00a9", - "hesion", - "\u0120s\u00c4\u00b1r", - "\u0120apparatus", - "\u0120correlated", - "\u0120Principal", - "\u0120corr", - "\u0120Official", - "\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d1\u0123\u00d0\u00ba\u00d0\u00b8\u00d0\u00b5", - "\u0120terminals", - "Should", - "\u0120vacun", - "\u0120stellt", - "\u0120mooi", - "etzung", - "\u0120\u00d0\u00ba\u00d1\u0122\u00d0\u00b0", - "\u0120dai", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b6", - "Team", - "\u0120PPE", - "\u0120\u00d0\u0140\u00d1\u0123", - "\u0120Leah", - "\u0120Ivy", - "yst", - "\u0120uhhh", - "\u0120nighttime", - "\u0120trendy", - "\u0120securities", - "\u0120continents", - "\u0120firsthand", - "\u0120Veron", - "\u0120\u00eb\u0124\u00ae", - "\u0120browsing", - "\u0120Cada", - "tro", - "\u0120tramp", - "reib", - "\u0120erstmal", - "irler", - "\u0120psic", - "\u0120getir", - "\u0120NP", - "\u0120dzieci", - "\u00d0\u00be\u00d0\u00b1\u00d1\u0122\u00d0\u00b0\u00d0\u00b7", - "\u0120magician", - "\u0120scrutiny", - "\u0120slab", - "\u0120OT", - "isty", - "iries", - "orest", - "\u0120tasked", - "\u0120morally", - "\u00ec\u0137\u00bc\u00ec\u00a7\u0122", - "ustered", - "\u0120fools", - "\u0120irrespons", - "\u0120einf", - "\u0120vi\u00e1\u00bb\u0129c", - "\u0120scor", - "\u0120pillows", - "\u0120Gegen", - "\u0120tutte", - "\u0120quarterly", - "\u0120didnt", - "\u0120Gym", - "\u0120Ether", - "\u0120\u00d8\u00ab", - "\u00d0\u00bb\u00d0\u00b8\u00d1\u012a\u00d0\u00ba\u00d0\u00be\u00d0\u00bc", - "\u0120signaling", - "\u0120Node", - "\u0120Doncs", - "\u0120yah", - "\u0120Kanal", - "\u0120fading", - "etin", - "\u0120influencers", - "\u0120medals", - "\u0120engineered", - "\u0120fermented", - "\u00ea\u00b2\u0142\u00ec\u00a7\u0122\u00eb\u00a7\u012e", - "\u0120Beethoven", - "\u00d7\u0140\u00d7\u00a9", - "inental", - "\u0120\u00ec\u0137\u012e\u00eb\u0142\u00a4", - "\u00c3\u00bctfen", - "alnya", - "\u0120overe", - "\u0120denkt", - "\u00d0\u00b0\u00d0\u00ba\u00d1\u0124\u00d0\u00b5\u00d1\u0122", - "\u0120\u00e2\u013a", - "\u0120necesit", - "\u0120generators", - "grass", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b4\u00d1\u0125\u00d0\u00bc", - "lie\u00c3\u0141en", - "Bar", - "\u013e\u00eb\u0131\u013b", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d1\u0124\u00d0\u00b5\u00d0\u00b9", - "\u0120sucking", - "\u0120stencil", - "\u0120primo", - "\u0120Breath", - "strom", - "\u0120immensely", - "\u0120appreh", - "\u00ec\u0142\u0137\u00ec\u013f\u00b4", - "Pop", - "\u0120jong", - "\u0120Giul", - "\u0120ADHD", - "\u0120h\u00c3\u00b6ren", - "\u0120elo", - "ivent", - "\u0120rus", - "\u0120outrageous", 
- "\u0120mastered", - "\u0120\u00ec\u00bb\u00a4", - "\u00d9\u012a\u00d9\u0123", - "ipes", - "\u0120Rudy", - "Jacob", - "\u0120bullish", - "\u0120tapped", - "\u0120faud", - "izophren", - "\u0120\u00d1\u0123\u00d0\u00be\u00d1\u0127", - "\u0120Darling", - "\u01201963", - "\u0120Prevention", - "\u00b2\u0136", - "\u0120abdominal", - "stones", - "\u0120avaient", - "\u00e1\u00bb\u0137i", - "make", - "\u0120sare", - "\u0120Instant", - "\u00d0\u00ba\u00d0\u00b0\u00d0\u00bc", - "\u0120keeper", - "\u0120blankets", - "\u00e3\u0123\u00a7\u00e3\u0123\u0139\u00e3\u0124\u0129\u00e3\u0123\u0128", - "\u0120sweats", - "\u0120Minneapolis", - "\u00e5\u0127\u00a8\u00e9\u0125\u00a8", - "\u0120genommen", - "\u0120fasten", - "\u0120Brussels", - "\u00e5\u0133\u00bc", - "\u0120cafeter", - "\u0120absorbing", - "\u0120hago", - "\u0120Elmo", - "\u0120gusto", - "\u0120Yap", - "M\u00c3\u00basica", - "\u0120tert", - "\u0120banda", - "\u0120mily", - "\u0120thereafter", - "\u0120Stockholm", - "\u0120Carson", - "\u0120calibration", - "ava\u00c5\u0141", - "ansa", - "ikke", - "\u0120foresee", - "\u0120qualche", - "\u0120deste", - "\u00e6\u00a4", - "\u00c3\u00bcn\u00c3\u00bcz", - "\u0120forge", - "Dis", - "esten", - "\u0120\u00ce\u00b4\u00ce\u00b9\u00ce\u00b1", - "\u0120encaps", - "\u0120Gespr", - "\u0120chercher", - "ickets", - "\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d1\u012d", - "Cr", - "\u0120\u00d0\u00a2\u00d0\u00b0\u00d0\u00ba\u00d0\u00b6\u00d0\u00b5", - "\u0120rabbits", - "\u0120Dot", - "heiten", - "\u0120causal", - "\u0120Foster", - "aj\u00c4\u0127c", - "\u0120bereit", - "\u0120ayudar", - "\u00e9\u00ab\u013b", - "\u00e3\u0123\u00b3", - "song", - "comb", - "\u0120fringe", - "\u0120cybersecurity", - "\u0120\u00eb\u013e\u00a8", - "\u0120kier", - "\u0120besch\u00c3\u00a4ft", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bd\u00d1\u0128\u00d0\u00b5", - "\u0120facilit", - "\u0120Namen", - "\u0120bilateral", - "tx", - "\u0120Wissenschaft", - "\u0120nuances", - "\u0120ripping", - "\u0120fy", - "\u0120Sicherheit", - "\u0120Ghana", - "olon", - "\u0120topped", - "\u0120Morocco", - "\u0120radial", - "\u0120LEE", - "\u0120Andreas", - "edd", - "\u0120\u00ec\u0139\u00b4\u00eb", - "\u0120Airlines", - "\u00e3\u0123\u0135\u00e3\u0124\u012f", - "\u0120valores", - "\u00ea\u00b7\u013e", - "Hy", - "\u0120\u00d0\u00b7\u00d0\u00b0\u00d0\u00b4\u00d0\u00b0\u00d1\u0129", - "\u0120Kendall", - "\u0120\u00d1\u0127\u00d0\u00b0\u00d1\u0122", - "\u0120Vamp", - "\u0120python", - "\u0120manageable", - "\u0120Gente", - "oise", - "iciary", - "\u0120imposs", - "\u0120Bunny", - "iesta", - "Andrew", - "\u0120sert", - "\u0120Cec", - "zzarella", - "\u0120automobile", - "\u0120Tiere", - "allows", - "\u00e5\u0128\u0128", - "\u0120\u00eb\u00b0\u0122", - "\u0120Scorp", - "\u0120Jelly", - "agara", - "\u0120Stretch", - "\u0120redef", - "\u0120exacerb", - "\u0120SHA", - "\u00c3\u00a9f", - "orsa", - "\u0120flawed", - "\u0120Noel", - "?!?", - "\u0120procent", - "\u0120menstru", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u0129", - "\u0120infants", - "\u00f0\u0141\u0130\u00b5", - "pause", - "\u0120Racing", - "\u01201948", - "\u0120superintendent", - "idores", - "idy", - "brahim", - "\u0120unlucky", - "\u0120perk", - "anci", - "\u0120\u00eb\u00a7\u012e\u00eb\u0124\u013a", - "\u0120\u00d0\u013e\u00d0\u00be\u00d1\u0123\u00d0\u00ba\u00d0\u00b2", - "\u0120finans", - "\u0120diferencia", - "\u0142\u012a\u00ec\u013f\u00b4", - "\u00e9\u0127\u012f", - "ORY", - "\u0120Tac", - "\u00db\u012e\u00d8\u00a7", - "\u0120desem", - 
"\u0120\u00d0\u00b2\u00d0\u00b0\u00d0\u00b6\u00d0\u00bd\u00d0\u00be", - "\u0120JU", - "\u0120\u00ec\u0140\u012a\u00ec\u0140\u0138\u00ec\u0137\u0126\u00ec\u013c\u0136", - "\u0120\u00ce\u013f", - "\u0120informations", - "\u0120HEL", - "hst", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b3\u00d0\u00be\u00d0\u00b2\u00d0\u00be\u00d1\u0122", - "\u0120voiture", - "\u0120reus", - "\u00c3\u00a4ndig", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0127\u00d0\u00be\u00d0\u00b6", - "jing", - "\u0120dru", - "altra", - "\u0120produits", - "\u0120kite", - "\u0120eyeball", - "\u0120Belt", - "\u0120Restaurant", - "\u0120gamb", - "\u0120porridge", - "itters", - "\u0120converts", - "\u0120yard\u00c4\u00b1m", - "\u0120m\u00c3\u00a1ximo", - "wirtschaft", - "\u0120\u00ed\u0137\u013a\u00eb\u0124\u013a\u00eb", - "\u0120\u00ec\u00a4\u0122", - "\u0120iceberg", - "\u0120vorbei", - "\u0120256", - "ocratic", - "\u0120reckless", - "onner", - "\u0120m\u00c3\u00bas", - "\u0120logically", - "\u0120Prison", - "\u0120Netz", - "\u0120vacant", - "\u0120nimmt", - "\u0120HARR", - "\u0120\u00d0\u00b7\u00d0\u00be\u00d0\u00b2", - "\u0120Dee", - "ringe", - "niest", - "\u0120Rules", - "\u00ec\u012c\u00a4\u00eb\u0141\u00bd", - "cussions", - "\u0120floral", - "\u0120constrained", - "\u0120differentiation", - "\u0120Quebec", - "\u0120\u00db\u0123\u00db\u012e\u00da\u00ba", - "\u0120p\u00c3\u00bablica", - "itel", - "\u0120accommodations", - "\u0120Gr\u00c3\u00bc", - "\u00ed\u013e", - "\u0120pickles", - "\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d1\u0123\u00d0\u00ba\u00d0\u00b8\u00d1\u0127", - "\u0120commissions", - "\u0120Baek", - "\u0120\u00c3\u00a7ocu\u00c4\u0141", - "\u0120Medium", - "\u0120periodically", - "\u0120wonderfully", - "\u0120staffing", - "\u00ec\u013d\u0132\u00eb", - "rire", - "fle", - "\u0120McL", - "\u0120\u00d1\u0124\u00d0\u00b5\u00d0\u00bf", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d0\u00ba", - "\u00d0\u00bd\u00d0\u00be\u00d0\u00bb\u00d0\u00be\u00d0\u00b3", - "\u0120\u00ed\u0123\u00ac\u00ea\u00b2\u012e", - "\u00e7\u013b\u00bc\u00e7\u0131\u00be", - "\u0120prosperous", - "\u0120Spiritual", - "\u0120Chick", - "DIA", - "\u0120\u00d0\u0141\u00d1\u0122\u00d0\u00b8\u00d0\u00b2\u00d0\u00b5\u00d1\u0124", - "\u0120per\u00c3\u0143", - "\u00d1\u012e\u00d1\u0130\u00d1\u0124", - "\u0120consultants", - "\u0120Earl", - "\u00e4\u00bb\u012c\u00e5\u00b9\u00b4", - "\u0120ruining", - "\u00d0\u00be\u00d1\u0122\u00d0\u00b5", - "\u0120penser", - "\u0120takiej", - "\u0120strengthened", - "\u0120Liquid", - "\u00d0\u00be\u00d0\u00bd\u00d0\u00b5\u00d1\u0128", - "\u00d0\u00b0\u00d0\u00b2\u00d0\u00b0\u00d1\u0124\u00d1\u012e", - "\u0120camer", - "\u0120disagreement", - "\u0120bathing", - "\u0120Yosh", - "aal", - "prechen", - "RISADAS", - "\u0120superstar", - "\u00e6\u0123\u0143", - "\u00d0\u00bb\u00d1\u0131\u00d1\u0124\u00d1\u012e", - "\u0120nib", - "\u0120Therm", - "\u0120DANIEL", - "\u0120paw", - "\u0120liquids", - "\u0120capacit", - "arken", - "\u0120vagina", - "\u0120mashed", - "\u0120emerges", - "yscy", - "\u0120unrelated", - "\u0120Guild", - "\u0120inverted", - "itives", - "Tra", - "\u0120begr", - "\u0120alte", - "\u00ec\u00a7\u0137", - "\u00e3\u0124\u0123\u00e3\u0123\u00a6", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b7\u00d1\u0122\u00d0\u00b0\u00d0\u00b1\u00d0\u00be\u00d1\u0124", - "finder", - "\u0120\u00d0\u00b4\u00d0\u00b0\u00d0\u00bb\u00d0\u00b5\u00d0\u00b5", - "\u0120\u00d0\u00b1\u00d0\u00bb\u00d0\u00b0\u00d0\u00b3\u00d0\u00be\u00d0\u00b4\u00d0\u00b0\u00d1\u0122", - "walker", - "\u0120crater", - "assadors", - "rences", - 
"inski", - "\u0120KIM", - "\u0120Elliot", - "2017", - "\u0120Sr", - "inka", - "anov", - "\u0120\u00ec\u0140\u013a\u00eb\u00aa\u00bb", - "\u0120proprietary", - "displaystyle", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d0\u00bc", - "\u0120\u00d0\u00b8\u00d0\u00b7\u00d0\u00b1", - "\u0120Panel", - "\u0120instincts", - "\u0120Communications", - "\u00e9\u00ba\u00bb", - "midt", - "\u0120\u00eb\u00a7\u012e\u00eb\u0135\u00a4\u00ec\u0138\u00b4", - "\u0120\u00d1\u0123\u00d0\u00bb\u00d0\u00be\u00d0\u00b2\u00d0\u00b0", - "\u0120Gilbert", - "\u00e7\u013d\u00ae\u00e5\u012b\u012f", - "\u00d0\u00a2\u00d0\u00b0\u00d0\u00ba", - "voorbeeld", - "\u00d0\u00b5\u00d1\u0130\u00d1\u0123\u00d1\u012e", - "aryn", - "quez", - "\u0120dart", - "\u00d1\u0138\u00d1\u012a", - "\u0120Hut", - "Sal", - "\u0120southeast", - "\u0120pesticides", - "\u0120helicopters", - "\u0120endured", - "iada", - "\u0120brewing", - "\u00ec\u0139\u00ac\u00eb", - "\u0120\u00d1\u0123\u00d0\u00b2\u00d0\u00be\u00d0\u00b1\u00d0\u00be\u00d0\u00b4", - "\u0120Saints", - "\u0120Fran\u00c3\u00a7ais", - "\u0120Economics", - "\u0120disloc", - "ophobia", - "Camer", - "\u0120negotiated", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00b0\u00d0\u00bb\u00d0\u00b8", - "\u00ec\u012c\u00a4\u00ed\u0123", - "ogie", - "\u0120tsunami", - "\u0120peeled", - "\u0120motivations", - "\u00e8\u00a8\u0143", - "ostat", - "flan", - "\u0120DAC", - "\u0120kav", - "'RE", - "\u0120Pearson", - "bbe", - "czenie", - "\u0120aten\u00c3\u00a7\u00c3\u00a3o", - "\u00ed\u0128\u00b5\u00eb\u0142\u00b9", - "\u00e3\u0123\u00a3\u00e3\u0123\u00a1", - "\u0120\u00d1\u0125\u00d0\u00b4\u00d0\u00b0\u00d1\u0122", - "\u0120introductory", - "\u0120Ici", - "\u00eb\u012e\u0122\u00eb", - "akat", - "\u0120trench", - "\u0120proceeded", - "\u0120Coin", - "\u0120derecho", - "\u0120Rede", - "\u00e6\u00af\u013d", - "\u00d0\u00b0\u00d0\u00bd\u00d0\u00bd\u00d1\u012d\u00d0\u00b9", - "\u0120incarcerated", - "\u0120Richmond", - "Rock", - "\u0120Pav", - "\u0120Karma", - "uges", - "\u0120conte\u00c3\u00ba", - "\u00eb\u00b9\u0126", - "\u0120\u00ea\u00b7\u00b8\u00eb\u00a7\u012e", - "\u0120Gone", - "\u0120wsp\u00c3\u00b3\u00c5\u0124", - "\u0120Rahmen", - "unken", - "\u0120\u00ec\u00a4\u0133\u00ec\u013c\u0136\u00ed\u0137\u013e", - "\u0120ib", - "\u0120attaching", - "Hay", - "\u0120suka", - "\u00ec\u012f\u00b9", - "\u0120pivotal", - "\u0120Respect", - "\u00c3\u0143da", - "IB", - "\u0120Verantwort", - "wiet", - "\u0120forensic", - "\u00d1\u0122\u00d0\u00b8\u00d1\u0123\u00d1\u0124", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bd\u00d1\u0128\u00d0\u00b8\u00d0\u00bf\u00d0\u00b5", - "\u0120markings", - "\u0120kettle", - "\u0120Opera", - "\u0120Doctors", - "\u0120shredded", - "\u0120recuer", - "\u0120vigil", - "\u0120Fail", - "\u0120entrev", - "\u0120\u00d0\u00b4\u00d1\u0125\u00d1\u012a", - "\u0120outbreaks", - "\u00e8\u00b5\u00b0\u00e5\u0132\u00a7", - "\u0120\u00cf\u0122\u00ce\u00bf", - "\u0120rogue", - "angled", - "\u0120yearly", - "\u0120Creed", - "\u0120wam", - "\u0120lotus", - "\u00ea\u00b3\u00bc\u00eb", - "\u00e3\u0122\u0123\u00e3\u0122\u0123", - "\u0120Spit", - "\u0120Itu", - "\u0120strains", - "\u0120stamped", - "\u0120plaint", - "\u0120potion", - "\u0120consolidation", - "\u00e8\u00a9\u0137", - "\u00d0\u00be\u00d1\u0129\u00d0\u00ba\u00d1\u0125", - "\u0120vlogging", - "\u0120slate", - "\u0120Auft", - "\u0120Incor", - "\u00e1\u00bb\u00abng", - "\u00a7\u0132", - "enh", - "\u0120hei\u00c3\u0141", - "\u0120domest", - "\u0120Strom", - "\u00e5\u012f\u00b3", - "akis", - "\u0120fragen", - "\u0120finer", - "\u0120Sug", - 
"\u0120uphill", - "\u0120\u00c3\u00a9\u00c3\u00a9n", - "\u00e2\u0122\u00a6)", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00bf", - "\u0120Corey", - "\u0120siebie", - "\u0120muse", - "\u0120cloves", - "\u0120pous", - "\u0120Finanz", - "\u0120Route", - "amat", - "\u0120mutually", - "\u0120\u00d0\u00b2\u00d0\u00bd\u00d1\u0125\u00d1\u0124\u00d1\u0122\u00d0\u00b8", - "\u0120Selena", - "\u00eb\u0136", - "\u0120Gaussian", - "\u00eb\u00b6\u0122\u00ed\u0126\u00b0", - "\u0120\u00d7\u0133\u00d7\u013d", - "\u0120ejerc", - "\u00e5\u00be\u00ae", - "kea", - "\u0120Gerry", - "\u0120Sic", - "\u00e5\u00a4\u00a7\u00e7\u013c\u0126", - "\u01201966", - "iese", - "\u0120fossils", - "\u0120estad", - "\u0120Kane", - "ci\u00c4\u0129", - "\u0120\u00ec\u013e\u0142\u00ed\u012c\u013e\u00eb", - "\u0120\u00d0\u00bf\u00d0\u00b0\u00d0\u00bc", - "\u0120Cruise", - "int\u00c3\u00a9rieur", - "\u0120bekannt", - "\u0120Pode", - "\u0120demander", - "Rem", - "\u0120invade", - "\u0120decorating", - "ropic", - "\u0120cowboy", - "\u0120Photo", - "opolit", - "\u0120\u00ec\u00bb\u00ac\u00eb\u0141\u00ac\u00eb", - "\u0120reap", - "\u0120handwriting", - "\u00e0\u00b9\u0126\u00e0\u00b8\u00a3", - "\u0120\u00eb\u013c", - "\u0120\u00d8\u00a8\u00d8\u00b9\u00d8\u00af", - "\u0120Mt", - "\u00d9\u0122", - "\u0120spaceship", - "\u0120nationalism", - "\u0120councils", - "\u0120Griffin", - "\u0120Ahmed", - "\u0120clich", - "\u0120OL", - "wl", - "\u0120Pilot", - "\u00e5\u00ae\u00ae", - "\u0120acronym", - "\u0120gels", - "\u0120electroly", - "\u00e8\u0135", - "\u0120\u00d0\u00bc\u00d0\u00bd\u00d0\u00be\u00d0\u00b9", - "\u0120episod", - "\u0120Dieses", - "\u0120ATP", - "\u0120ediyorum", - "\u0120expresses", - "\u0120exhibits", - "Comm", - "\u0120\u00d0\u00ba\u00d1\u0122\u00d1\u0125\u00d0\u00bf", - "\u0120matar", - "\u01202025", - "\u0120Artem", - "vasive", - "r\u00c3\u0142", - "\u0120be\u00c5\u0141", - "\u00e9\u00bb\u0125", - "\u0120lizard", - "\u0120fille", - "\u0120\u00ec\u00a7\u012a\u00eb\u00ac\u00b8", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d1\u012b", - "\u0120t\u00c3\u00bcr", - "\u0120culprit", - "\u0120woven", - "\u0120ANY", - "nim", - "\u0120tay", - "\u0120promin", - "\u0120acompa", - "\u0120id\u00c3\u00a9", - "\u0120boiler", - "\u0120Themen", - "\u0120avenue", - "\u0120Mud", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d0\u00b2\u00d1\u012d\u00d0\u00b5", - "\u0120witnessing", - "\u0120lance", - "\u0120CHAN", - "\u0120Bever", - "\u00d8\u00aa\u00d9\u0127", - "\u0120chemotherapy", - "King", - "\u0120b\u00c4\u013bd\u00c4\u013b", - "\u0120atual", - "\u0120tive", - "\u0120talkin", - "\u0120quedar", - "ie\u00c3\u0141", - "edel", - "\u0120\u00ec\u0138\u00b4\u00ec\u0142\u013e", - "\u0120jogar", - "\u0120\u00c3\u00b6r", - "\u0120undertaking", - "\u0120Strength", - "\u0120milh\u00c3\u00b5es", - "\u0120Wine", - "\u0120Molt", - "\u00e8\u00ae\u00b2", - "\u00e3\u0123\u0133\u00e3\u0124\u012e", - "\u0120undermine", - "\u0120Archives", - "vana", - "mercial", - "MC", - "\u0120caste", - "\u00d0\u00bf\u00d1\u0122", - "\u0120legislators", - "ulators", - "\u00c3\u00aanio", - "\u0120\u00eb\u012f\u00b0\u00eb", - "\u0120\u00d1\u0127\u00d0\u00be\u00d1\u0124\u00d0\u00b8\u00d1\u0124\u00d0\u00b5", - "\u0120\u00d0\u00bd\u00d0\u00b5\u00d0\u00ba", - "\u0120surn", - "\u0120consci", - "\u0120POW", - "\u0120culinary", - "\u0120KAT", - "\u0120Folks", - "\u00d1\u012d\u00d0\u00b2\u00d0\u00b0\u00d0\u00b5\u00d0\u00bc", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00ba", - "\u00e3\u0123\u0133\u00e3\u0124\u012d", - "service", - "pts", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b1\u00d0\u00b5\u00d0\u00b4", 
- "\u00e6\u013a\u00af\u00e5\u0137\u012c", - "\u0120tents", - "\u0120nord", - "STE", - "\u0120republican", - "\u0120wyk", - "\u0120minions", - "\u00e8\u013b\u0137", - "\u0120memang", - "jest", - "\u0120comparative", - "\u0120tyle", - "carbon", - "bedingt", - "ksen", - "\u0120negativity", - "\u0120sj\u00c3\u00a4lv", - "\u0120d\u00c3\u00ba", - "\u00e6\u012b\u0122\u00e6\u013e\u012b", - "\u0120recalled", - "cra", - "\u0120Tada", - "\u0120\u00d1\u0122\u00d1\u0125\u00d0\u00ba\u00d0\u00b8", - "\u0120\u00d0\u00be\u00d0\u00bf\u00d1\u0122\u00d0\u00b5\u00d0\u00b4\u00d0\u00b5\u00d0\u00bb", - "\u0120procrast", - "\u0120jogos", - "\u0120Oo", - "\u0120Hearts", - "\u0120\u00c3\u00a9ch", - "\u0120ksi\u00c4\u0127\u00c5\u00bc", - "\u0120coarse", - "\u0120Tube", - "\u0120Greens", - "\u0120\u00c3\u00a9n", - "\u0120dumbbell", - "\u0120\u00d1\u0124\u00d0\u00b8", - "\u0120querer", - "\u00d8\u00a7\u00d8\u0143", - "\u00cf\u0125\u00ce\u00b5\u00ce\u00b9", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b0\u00d0\u00b2\u00d0\u00b8\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d0\u00be", - "\u0120\u00d0\u00bf\u00d0\u00b0\u00d0\u00bf", - "\u0120compra", - "\u0120t\u00c3\u00a9r", - "\u0120Antes", - "\u0120optimum", - "\u0120biscuit", - "\u00ce\u00ba\u00ce\u00b9", - "aczego", - "\u0120\u00ec\u012d\u013e\u00ea\u00b0\u0126\u00ec\u013f\u00b4", - "\u0120Marines", - "vero", - "\u0120vaccinations", - "\u0120petty", - "riters", - "\u0120\u00d0\u00b0\u00d0\u00bb", - "country", - "\u0120counters", - "\u0120attendant", - "\u0120Hui", - "\u00e3\u0123\u00a8\u00e3\u0123\u0126\u00e3\u0123\u0128\u00e3\u0123\u0135\u00e3\u0123\u00a8\u00e3\u0123\u00a7", - "cka", - "\u00d1\u0123\u00d1\u0124\u00d0\u00b2\u00d0\u00b5\u00d0\u00bd\u00d0\u00bd\u00d1\u012d\u00d0\u00b9", - "guy", - "\u0120tricked", - "\u0120RED", - "\u0120thrilling", - "\u00cf\u0122\u00ce\u00bf\u00ce\u00b9", - "\u0120piggy", - "\u0120anunci", - "ORTER", - "\u0120Value", - "\u0120rond", - "\u0120ADA", - "\u0120poser", - "hores", - "\u0120Roland", - "\u0135\u00af", - "\u0120noir", - "\u0120\u00d7\u00a9\u00d7\u0132\u00d7", - "\u00eb\u00b0\u013e", - "iemand", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0124\u00d0\u00b5\u00d1\u0122", - "\u00ea\u00b3\u00b3", - "\u0120\u00ea\u00b1\u00b1", - "\u0120formatting", - "\u0120Led", - "\u00e8\u00a7\u0122\u00e7\u013e\u00be", - "\u0120killers", - "\u0120\u00c4\u0133\u00e1\u00ba\u00a5y", - "\u0120haar", - "again", - "!>[", - "minster", - "\u0120\u00d0\u00b2\u00d0\u00bb\u00d0\u00b8", - "\u0120identifier", - "\u0120Lambda", - "\u0120tros", - "\u0120flawless", - "\u0120detrimental", - "\u0120bunlar\u00c4\u00b1", - "War", - "\u0120regi\u00c3\u00a3o", - "\u00e7\u013e\u0141\u00e7\u013c\u0126\u00e6\u013a\u00af", - "\u0120Bike", - "cessors", - "\u0120c\u00c3\u00b9ng", - "\u0120RN", - "\u0120\u00ea\u00bd\u0125", - "\u0120k\u00c3\u00bc\u00c3\u00a7\u00c3\u00bck", - "\u0120Beginning", - "\u00ed\u013a\u00b8\u00eb", - "\u0120gewe", - "\u0120denote", - "\u0120Alberto", - "\u0120probiot", - "\u0120ode", - "\u0120molar", - "\u0120bursting", - "assumed", - "\u0120footprints", - "veda", - "\u0120steroids", - "\u0120flaming", - "\u0120Eller", - "\u0120erkennen", - "\u00c3\u00a4tzen", - "\u0120lifecycle", - "\u0120DOU", - "\u0120Karena", - "\u0120Guerra", - "\u00e8\u00bf\u013a\u00e6\u013a\u00af", - "\u0120sinister", - "\u0120pod\u00c3\u00a9is", - "\u0120parab", - "\u0120oko", - "\u0120mat\u00c3\u00a9ri", - "\u0120caric", - "sonaro", - "\u0120praticamente", - "\u00d1\u0125\u00d1\u0123\u00d0\u00b0", - "\u0120comunque", - "\u0120vigilant", - "\u0120regimes", - "\u0120Shooting", - 
"\u0120raids", - "\u0120Nora", - "\u0120Wieder", - "mens", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b4", - "\u0120\u00ea\u00b2\u00bd\u00ec\u013c\u00b0\u00ec\u0139\u0132\u00eb\u012c\u0136", - "\u0120\u00d0\u00b2\u00d1\u0127\u00d0\u00be\u00d0\u00b4", - "\u0120autobi", - "\u0120Schn", - "\u0120Robbie", - "\u0120Fitness", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bd\u00d1\u0126", - "\u0120penguin", - "\u00d0\u00bc\u00d0\u00be\u00d1\u0124\u00d1\u0122\u00d1\u0131", - "\u0120\u00d0\u00bc\u00d0\u00b8\u00d0\u00bd\u00d0\u00b8\u00d0\u00bc", - "plays", - "\u0120delegates", - "Mer", - "\u0120sistem", - "\u0120Michaels", - "male", - "\u00d8\u00a7\u00d8\u00b9", - "\u0120c\u00c3\u00a1ch", - "\u0120H\u00c3\u00a4", - "\u0120\u00d7\u013b\u00d7\u0137\u00d7\u0135\u00d7\u00a2", - "\u0120superpower", - "\u0120stron", - "\u0120rover", - "\u0120d\u00c3\u00a9pend", - "\u00e9\u013b\u00b3", - "\u0120retiring", - "\u0120vampires", - "\u0120merde", - "\u0120Changing", - "\u0120tame", - "\u0120spokesperson", - "\u0120cay", - "\u0120flirting", - "\u0120Gr\u00c3\u00b6", - "\u0120w\u00c3\u00a4r", - "\u0120wyb", - "\u0120coeur", - "\u00e1\u00ba\u00a1nh", - "\u0120\u00ec\u013b\u0122\u00ec\u0126\u013e", - "\u0120connais", - "\u0120Hundreds", - "\u0120Bea", - "\u0120\u00ce\u00b1\u00cf\u0122", - "pruch", - "\u0120sociedade", - "\u0120Whilst", - "\u0120Kait", - "espace", - "\u0120chia", - "\u0120Erm", - "\u0120\u00eb\u00b0\u0136\u00ea\u00bf", - "\u0120fences", - "\u0120Mortal", - "\u00ea\u00b2\u0123", - "\u0120\u00d0\u00b3\u00d1\u0122\u00d0\u00b0\u00d1\u0126", - "\u0120Homeland", - "\u0120JUN", - "isst", - "\u0120parlar", - "\u0120sporty", - "\u00c3\u00a9o", - "\u0120deepen", - "\u0120Behavior", - "\u00e9\u0122\u0131", - "\u00e5\u0135\u012a\u00e5\u0135\u012a\u00e5\u0135\u012a", - "\u0120errand", - "\u0120rotary", - "\u0120Wellington", - "Wind", - "\u0120mesela", - "\u00e1\u00ba\u00a3ng", - "iende", - "\u0120excell", - "\u0120Genius", - "\u0120Eduardo", - "\u00e6\u013e\u012b\u00e4\u00ba\u00ba", - "\u0120\u00c5\u0141unu", - "\u0120\u00c4\u00b0stanbul", - "\u0120produto", - "\u0120\u00e3\u0127\u0130\u00e3\u0127\u0130", - "OFF", - "\u0120wollt", - "\u00e7\u012a\u0128", - "\u0120\u00eb\u012b\u00b4\u00ec\u012c\u00a4", - "\u0120lass", - "\u0120hertz", - "\u0120aromatic", - "\u0120\u00d0\u00b7\u00d0\u00b2\u00d0\u00be\u00d0\u00bd", - "\u0120autoc", - "\u0120Lust", - "\u0120112", - "\u0120\u00ce\u0139", - "\u0120reviewers", - "\u0120receptive", - "\u00e5\u00b0\u012f\u00e4\u00ba\u0128", - "\u00c3\u00a2nd", - "oglo", - "\u0120\u00ec\u0137\u0126\u00eb\u012d\u013b", - "\u0120ngo", - "\u00d1\u0138\u00d1\u0124\u00d0\u00b8", - "\u00c3\u00a5t", - "cono", - "\u0120tekrar", - "\u0120\u00ec\u00a3\u00bc\u00ea\u00b3\u0142", - "\u0120gelmi\u00c5\u0141", - "\u0120bedtime", - "\u0120Argh", - "ADA", - "\u0120\u00d0\u00b3\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d0\u00b4\u00d0\u00b0", - "\u0120\u00c4\u0129", - "\u0120alliances", - "giggling", - "\u0120yerde", - "\u0120spies", - "\u0120gutes", - "\u00c3\u00a7i", - "\u0120alltid", - "\u0120Lah", - "\u0140\u0132\u00eb", - "\u0120dok\u00c5\u0124ad", - "\u00d9\u012a\u00d9\u012c", - "\u0120toxicity", - "\u0120cancellation", - "\u01201958", - "dro", - "\u0120\u00ec\u0140\u0133\u00ec\u013f\u0122", - "\u0120Motorola", - "\u0120multin", - "\u0120enthusiasts", - "\u0120Mighty", - "\u0120Coconut", - ":\u00e3\u0122\u012e", - "\u0120Pictures", - "\u0120sangre", - "\u0120blinking", - "olesome", - "\u0120\u00ec\u012c\u00a4\u00ed\u0125\u0122\u00ec\u013f\u00bc", - "FP", - "\u0120booming", - 
"\u0120\u00d0\u00b4\u00d0\u00b5\u00d1\u0123\u00d1\u0131\u00d1\u0124", - "\u0120ratchet", - "\u0120timelines", - "leness", - "\u0120cages", - "\u0120Goodnight", - "ometimes", - "\u0120cunning", - "\u0120Risk", - "uled", - "dade", - "\u0120prata", - "\u0120gustar\u00c3\u0143a", - "amus", - "\u0120Jinping", - "\u0120estrut", - "\u0120descobrir", - "\u0120M\u00c4\u0123", - "\u0120Allan", - "\u0120\u00e5\u012a\u0128", - "\u0120\u00d7\u013e\u00d7\u00a7", - "\u0120preserv", - "\u0120Strawberry", - "\u00c4\u0131", - "Lu", - "\u0120kro", - "\u0120Reports", - "\u00ec\u0127\u0136\u00ec\u0137\u00bc", - "\u0120valt", - "\u0120pouvait", - "\u0120appar", - "\u0120Bone", - "\u0120preferably", - "\u0120Rep\u00c3\u00bablica", - "\u00e5\u00b0\u00b1\u00e5\u012a\u00b0", - "\u0120herzlich", - "\u0120chimney", - "\u0120\u00c3\u00a7ev", - "\u0120visas", - "\u0120verr", - "\u0120cultivation", - "\u0120Armenia", - "\u0120\u00d0\u00b2\u00d0\u00b4\u00d1\u0122\u00d1\u0125\u00d0\u00b3", - "\u0120cockro", - "retched", - "artz", - "\u0120\u00d0\u00bb\u00d1\u0130\u00d0\u00b4\u00d1\u0131\u00d0\u00bc", - "\u0120pol\u00c3\u0143ticas", - "\u0120Panz", - "\u0120AKA", - "\u0120\u00eb\u012a\u012e\u00eb\u0141\u00ac", - "\u0120erro", - "\u0120camper", - "\u0120102", - "\u00e0\u00a4\u00b8", - "done", - "\u0120hoard", - "\u0120\u00d0\u0141\u00d0\u00be\u00d1\u0124\u00d0\u00be\u00d0\u00bc", - "jeong", - "\u0120desta", - "pak", - "\u0120inim", - "\u0120growers", - "\u0120Message", - "\u0120elector", - "engage", - "\u0120Forbes", - "\u0120Cincinnati", - "\u0120diff\u00c3\u00a9rence", - "df", - "\u0120spar", - "\u0120awaits", - "\u0120USSR", - "\u0120Rising", - "\u0120Ho\u00c5\u0141", - "\u0120footing", - "\u0120condiciones", - "\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "\u0120clinician", - "\u0120Diskuss", - "\u00e5\u00a3\u0135", - "\u00d7\u00a8\u00d7\u0134", - "\u00d7\u00a5", - "iteit", - "gren", - "\u0120charisma", - "\u0120leuke", - "\u0120irritating", - "\u0120circa", - "\u0120Rhodes", - "\u0120pior", - "\u0120handicap", - "royable", - "\u0120vull", - "OG", - "\u0120in\u00c3\u0143cio", - "ieri", - "\u0120splashing", - "\u0120demise", - "\u0120assistir", - "\u00d1\u0129\u00d1\u0124\u00d0\u00be", - "\u0120covert", - "\u0120Gud", - "\u00e0\u00b8\u012b", - "kl\u00c3\u00a4r", - "\u0120\u00ec\u0140\u0132\u00ea\u00be\u00b8", - "\u0120ver\u00c3\u00a4ndert", - "\u0120REM", - "\u0120Conven", - "atge", - "\u0120pierwsze", - "\u0120clergy", - "lington", - "liv", - "VPN", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b6\u00d0\u00b0\u00d0\u00bb", - "\u0120Hate", - "\u00e3\u0123\u00a8\u00e3\u0123\u0135\u00e3\u0124\u012f", - "\u00cf\u0128\u00ce\u00bf", - "\u0120Respons", - "\u00d0\u00be\u00d0\u00b7\u00d0\u00b4", - "\u0120etmek", - "\u0120chemin", - "\u00d9\u0127\u00d8\u00a9", - "\u0120\u00ea\u00b0\u0122\u00ec\u00a1\u00b1", - "Tre", - "\u0120umas", - "\u0120Burton", - "\u0120patriarch", - "\u0120Smithsonian", - "\u00a5\u013a", - "Moon", - "Air", - "\u0120medios", - "\u0120eraser", - "\u0120wollten", - "\u0120pareil", - "\u0120Billie", - "\u00e6\u012c\u00bd", - "\u00d0\u00b5\u00d1\u0122\u00d1\u0124\u00d0\u00b2", - "\u0120parlament", - "\u0120agony", - "\u0120QUE", - "sequently", - "Another", - "\u0120Whew", - "\u0120Annual", - "\u0120seben", - "\u00ec\u0125\u0123\u00ec\u013f\u0126", - "values", - "\u0140\u013e\u00eb\u00a7\u012e", - "\u0120sinon", - "ereal", - "\u0120Enlight", - "\u0120Chemistry", - "\u0120Catalunya", - "\u0120doctr", - "anton", - "\u0120stuk", - "\u0120Plate", - "\u0120Kardashian", - "\u0120filos", - "\u0120Wet", 
- "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bf\u00d1\u012d\u00d1\u0124", - "\u0120unknowns", - "\u0120Schon", - "\u0120Baldwin", - "\u0120telescopes", - "\u0120Gucci", - "oxide", - "\u0120Conservative", - "\u00ec\u0126\u00b1\u00ec\u013f\u0126", - "\u0120hinaus", - "Power", - "\u0120\u00ea\u00b1\u00b4\u00ea\u00b0\u0137", - "\u0120prevail", - "orman", - "machine", - "\u01201946", - "\u0120unbel", - "\u0120schaut", - "\u0120piel", - "eenth", - "\u0120objectively", - "\u0120chakra", - "audio", - "\u0120chicos", - "\u0120Vault", - "\u00e5\u00b0\u012a", - "\u0120medicinal", - "\u0120Tail", - "While", - "\u0120asphalt", - "\u0120froze", - "\u0120EK", - "unching", - "nosis", - "2015", - "\u0120Gri", - "\u0120oddly", - "\u0120M\u00c3\u00a4r", - "\u0120Aeg", - "colo", - "Par", - "\u0120\u00eb\u0135\u00a4\u00ec\u0138\u00b4\u00eb", - "\u0120vinden", - "\u0120OVER", - "\u0120iced", - "\u0120scorp", - "\u0120hac", - "qualified", - "\u0120\u00d1\u0125\u00d0\u00b2\u00d0\u00b8\u00d0\u00b4\u00d0\u00b5\u00d1\u0124\u00d1\u012e", - "ermo", - "HEN", - "\u0120soi", - "\u0120multiples", - "\u0120layouts", - "\u0120blindness", - "\u0120Bowser", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00b4\u00d1\u0124", - "\u0120\u00c3\u0130", - "ventional", - "\u0120mata", - "mad\u00c4\u00b1", - "\u0120geez", - "\u0120cadence", - "\u0120wa\u00c5\u00bcne", - "\u0120Christie", - "venge", - "Call", - "\u0120turnaround", - "\u0120blob", - "\u0120\u00d0\u00af\u00d0\u00ba", - "\u0120Voiceover", - "\u0120peril", - "\u0120Jaime", - "\u0120HOY", - "lane", - "\u0120sebel", - "\u0120Duo", - "\u0120Historical", - "\u0120dni", - "\u0120gema", - "yk", - "\u0120sabem", - "\u00e1\u00ba\u00afng", - "\u0120vars", - "\u0120Ronnie", - "\u0120Ronaldo", - "\u0120Perqu\u00c3\u00a8", - "nsinn", - "hair", - "\u0120relentless", - "\u0120lyn", - "\u0120traveler", - "\u00e6\u0122\u0130\u00e9\u00ba\u00bc\u00e4\u00ba\u0128", - "nine", - "\u0120antim", - "\u0120\u00ec\u00bc\u0122", - "\u0120snowball", - "\u0120\u00d1\u0127\u00d0\u00b0\u00d1\u0122\u00d0\u00b0\u00d0\u00ba\u00d1\u0124\u00d0\u00b5\u00d1\u0122", - "\u0120interns", - "\u0120constituency", - "\u0120\u00d0\u013f\u00d0\u00b0\u00d0\u00bc", - "\u00d7\u013e\u00d7\u013e", - "VEL", - "\u0120viktigt", - "\u0120apoyo", - "\u00d9\u0126\u00d8\u00a8", - "\u0120jard", - "\u0120heightened", - "\u00d1\u0122\u00d0\u00be\u00d1\u0123\u00d1\u0124", - "\u0120SMITH", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00bb\u00d0\u00b0", - "\u0120repairing", - "\u0120rigt", - "\u0120Sheikh", - "\u0120Britney", - "\u0120everytime", - "\u0120adventurous", - "ockey", - "ernt", - "\u0120ataque", - "\u0120Alternatively", - "effect", - "\u0120palavras", - "\u0120Elliott", - "\u0120r\u00c3\u00a9ussi", - "\u0120hypertension", - "\u0120Manual", - "\u0120prophetic", - "\u0120handc", - "\u00d1\u012e\u00d0\u00b5", - "\u0120refrain", - "\u0120Squid", - "\u00ec\u0140\u00a1", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bc\u00d0\u00b0\u00d0\u00bd", - "\u00c3\u00a4llen", - "\u0120lleg\u00c3\u00b3", - "\u0120bash", - "iony", - "\u0120\u00d1\u0123\u00d0\u00ba\u00d0\u00bb\u00d0\u00b0\u00d0\u00b4", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00b1", - "\u0120careless", - "\u0120Pool", - "\u0120tr\u00c3\u00a1s", - "\u0120fils", - "\u0120Schr", - "\u0120sprawd", - "\u0120Monaten", - "\u0120unforgettable", - "\u0120Cotton", - "\u0120inconvenient", - "\u0120RX", - "oris", - "\u0120humbled", - "\u00d7\u00aa\u00d7\u0139", - "\u0120\u00d8\u00a2\u00d9\u00be", - "\u0120incre\u00c3\u0143", - "\u0120Kommentare", - "\u00e8\u012a\u0134", - "raci\u00c3\u00b3n", - 
"\u0120vantage", - "\u0120Seal", - "\u0120\u00ec\u013f\u00b4\u00ea\u00b1\u00b0\u00eb\u00a5\u00bc", - "\u0120joue", - "\u00e3\u0123\u013f\u00e3\u0123\u0128\u00e3\u0123\u00a7\u00e3\u0123\u013b\u00e3\u0123\u0143", - "\u0120\u00ec\u013a\u00a4\u00eb\u0140\u013a", - "\u0120\u00d0\u00b8\u00d1\u0123\u00d0\u00bf\u00d1\u012d\u00d1\u0124", - "oben", - "\u0120grate", - "\u0120controle", - "\u0120Percy", - "\u00c5\u0124ada", - "\u0120simultaneous", - "\u0120prototy", - "\u0120gro\u00c3\u0141er", - "\u0120bewusst", - "inizi", - "\u0120passieren", - "\u0120Happiness", - "\u00e5\u012b\u0129", - "shi", - "geht", - "\u0120stationed", - "\u0120Ergebnis", - "\u0120directamente", - "\u0120survives", - "\u0120persones", - "BERG", - "\u0120vomiting", - "\u0120conhecer", - "\u0120adjour", - "\u0120Civic", - "pei", - "burst", - "\u0120\u00eb\u012d\u00a4\u00eb\u012d\u012a", - "\u00e9\u0131", - "\u0120sled", - "\u0120plataforma", - "\u0120Sect", - "\u0120Defin", - "\u00e7\u013b\u00bb\u00e9\u012e\u00b2", - "\u00c3\u00a9nom", - "chnet", - "\u0120profitability", - "\u0120erreicht", - "\u00e1\u00bb\u0131i", - "cation", - "\u0120\u00ec\u00a7\u0122\u00ea\u00b8", - "\u0120perdre", - "\u0120felony", - "\u01201957", - "\u00e6\u012a\u0133\u00e5\u00be\u012a", - "\u0120unsuccessful", - "\u0120nagyon", - "\u0120elasticity", - "\u0120facade", - "\u0120earthly", - "\u0120\u00d0\u00b0\u00d0\u00bc\u00d0\u00b5\u00d1\u0122\u00d0\u00b8\u00d0\u00ba\u00d0\u00b0\u00d0\u00bd", - "\u0120conn", - "cla", - "Du", - "\u0120politiques", - "\u0120halo", - "iantes", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b5\u00d0\u00b9", - "\u00e3\u0125\u00b3\u00e3\u0125\u012b", - "tones", - "elier", - "\u00e8\u00ae\u013c", - "htaking", - "\u0120wichtige", - "\u0120anno", - "\u0120Lok", - "illions", - "\u0120viver", - "\u0120solchen", - "\u0120suf", - "\u0120Salz", - "\u0120Nvidia", - "zuge", - "\u0120Spike", - "Video", - "\u0120twor", - "\u0120Ala", - "\u00e8\u0133\u012b", - "\u0120hanya", - "\u0120Adm", - "\u00ec\u013f\u00b5", - "\u0120Patienten", - "\u0120Onion", - "\u0120Kobe", - "\u0120Scene", - "\u0120Rash", - "\u00e6\u00a8\u013b", - "\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d1\u0124", - "istani", - "General", - "leye", - "imbap", - "\u0120concealed", - "\u0120Fridays", - "\u0120Wool", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d0\u00b2\u00d1\u012d\u00d1\u0127", - "\u00d8\u00b4\u00d8\u00b1", - "\u0120\u00ea\u00b2\u00b0\u00ea\u00b3\u00bc", - "\u0120jedoch", - "\u00b4\u00ec\u012d\u013e", - "\u0135\u00a4\u00eb\u0131\u0126", - "\u0120\u00ec\u0140\u00a5\u00eb\u0124\u013e", - "ukt", - "Lou", - "\u0120\u00eb\u00a8\u00b9\u00ec\u0138\u00b4", - "\u0120Expect", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d0\u00bc\u00d0\u00be\u00d0\u00b9", - "\u0120irresponsible", - "\u0120acerca", - "\u0120Zust", - "\u00d7\u00a8\u00d7\u013a", - "UI", - "\u0120youtubers", - "\u0120Positive", - "\u0120socioe", - "\u0120snatch", - "\u00e8\u0125\u012e", - "\u0120refreshed", - "\u0120nominations", - "\u0120Patt", - "\u0120obsolete", - "\u0120demi\u00c5\u0141", - "\u00e5\u0131\u00a4", - "ormu\u00c5\u0141", - "\u0120\u00ec\u0128\u0136\u00ec\u00a7\u0123\u00ed\u0140\u012a", - "\u0120fla", - "\u0120craziest", - "\u0120Zie", - "\u0120T\u00c3\u00ba", - "zep", - "icem", - "\u0120\u00eb\u00a9\u012d\u00ec\u0140\u012a", - "\u0120cynical", - "\u00e3\u0123\u013f\u00e3\u0124\u0135\u00e3\u0123\u00aa", - "\u0120tresp", - "\u0120craz", - "\u00d5\u00a5\u00d5", - "\u0120nelle", - "\u0120mph", - "\u0120Nered", - "\u0120Kob", - "\u0120Eck", - "\u00a8\u00b8\u00eb\u012d\u012a", - "Jan", - 
"\u0120\u00d0\u00a2\u00d0\u00be\u00d0\u00b3\u00d0\u00b4\u00d0\u00b0", - "\u0120deci", - "\u0120Vog", - "\u0120bubbling", - "\u00e9\u0122\u0122", - "\u00c3\u00baa", - "\u0120productos", - "iberal", - "\u0120replicated", - "\u0120Improve", - "illary", - "Cha", - "\u0120r\u00c3\u00a9du", - "\u0125\u0132\u00ed\u0137\u013a\u00eb\u00a9\u00b4", - "\u0120connot", - "\u0120Krit", - "\u0120\u00d0\u00b4\u00d1\u0125\u00d1\u0127\u00d0\u00be\u00d0\u00b2", - "\u0120treadmill", - "\u0120PW", - "\u0120\u00d0\u00b7\u00d0\u00be\u00d0\u00b2\u00d1\u0125\u00d1\u0124", - "\u0120clams", - "\u0120drafting", - "\u01201956", - "unta", - "\u0120expenditures", - "\u0120Hoover", - "WOO", - "\u00d1\u012a\u00d0\u00b5\u00d0\u00b5", - "\u0120deduction", - "monary", - "\u0120recib", - "\u0120povo", - "\u0120\u00eb\u012f\u0136\u00eb", - "\u0120PAL", - "\u0120Blow", - "\u0120wyp", - "\u0120destac", - "deal", - "Graeme", - "\u0120n\u00c3\u00a9cessaire", - "\u0120damned", - "\u01201938", - "\u0120\u00ec\u012d\u00a4\u00ec\u0142\u013e\u00eb\u00a1\u013e", - "\u0120troop", - "\u0120insightful", - "\u0120TJ", - "\u0120\u00d0\u00be\u00d1\u0123\u00d0\u00b2", - "\u0120fidelity", - "\u0120Skip", - "\u0120Mayo", - "\u00eb\u00a7\u013f", - "appe", - "\u0120blas", - "\u0120WY", - "\u0120GN", - "ctar", - "Su", - "\u0120cuent", - "hews", - "\u0120corpses", - "Abs", - "\u0120wastewater", - "\u0120ciek", - "\u0120Onu", - "\u0120explosives", - "\u0120arma", - "\u0120STEPHAN", - "politik", - "\u0120Osaka", - "ta\u00c5\u0124", - "\u0120yap\u00c4\u00b1yor", - "\u0120izquier", - "\u0120beleza", - "\u0120Wyatt", - "\u00e5\u0132\u00b8", - "\u0120suk", - "\u0120specjal", - "\u0120danke", - "whistle", - "\u0120f\u00c3\u0143sica", - "\u0120Harriet", - "\u0120\u00ec\u0137\u0126\u00ed\u012e\u012e", - "\u0120willkommen", - "iping", - "\u0120\u00d1\u0123\u00d0\u00bc\u00d0\u00be\u00d1\u0124\u00d1\u0122\u00d0\u00b8\u00d1\u0124\u00d0\u00b5", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b6\u00d0\u00b5\u00d1\u012a\u00d1\u012e", - "\u0120inaccurate", - "\u0120arrogance", - "\u0120Remo", - "\u00ce\u00b3\u00ce\u00ac", - "assed", - "\u0120deliveries", - "\u0120stinky", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d0\u00b6", - "jay", - "\u0120transitional", - "\u0120rere", - "\u0120NGOs", - "\u0120ATM", - "\u00d8\u00ae\u00d8\u00aa", - "iology", - "\u0120\u00d0\u00b2\u00d0\u00bb\u00d0\u00b0\u00d0\u00b4", - "\u0120schme", - "\u0120Shine", - "\u00ec\u0137\u00a1", - "pants", - "\u0120serge", - "\u0120senhor", - "\u0120abduct", - "\u0120Bryant", - "VES", - "\u0120awakened", - "\u0120Laz", - "ropolis", - "\u0120Lao", - "\u00e8\u00be\u013d\u00e8\u012d\u00a6", - "\u0120villa", - "\u0120summers", - "\u0120enthal", - "\u01201949", - "Via", - "\u0120\u00ec\u0138\u00b4\u00ec\u00a8", - "\u0120tendon", - "\u0120violet", - "\u0120intellectually", - "\u0120bounced", - "araus", - "\u01201919", - "\u0120vraag", - "\u0120spel", - "\u0120Schwar", - "Scott", - "\u0120Indo", - "\u0120\u00eb\u00a7\u013f", - "\u0120canonical", - "\u0120IKE", - "\u0120that\u00c3\u0143s", - "\u0120mellan", - "\u00e6\u00af\u0134", - "igmat", - "Could", - "...?)", - "\u0120foarte", - "\u0120Kumar", - "rendo", - "\u0120\u00c3\u00a9l\u00c3\u00a9", - "\u00e0\u00b4", - "valuation", - "cases", - "\u0120intuitively", - "hong", - "etted", - "\u0120souven", - "\u0120morb", - "\u0120cors", - "\u0120NV", - "\u0120Hasan", - "\u00e6\u0125\u0127\u00e5\u0128\u00b5", - "ieved", - "\u0120\u00ec\u00a7\u0122\u00ea\u00b8\u012a\u00ec\u013f\u0122", - "\u0120dumpling", - "\u0120contr\u00c3\u00b4le", - "\u0120ambiguity", 
- "\u00e6\u00a9\u0141\u00e6\u013e\u0125", - "\u0120cog", - "\u0120Scriptures", - "\u0120cai", - "\u0120bever", - "\u00e5\u00a4\u00a7\u00e5\u00ae\u00b6\u00e9\u0125\u00bd", - "\u0120huis", - "\u0120aime", - "\u0120erkl\u00c3\u00a4ren", - "\u0120LM", - "\u0120Fey", - "\u00e9\u013c\u00be", - "\u00e0\u00ae\u00b1\u00e0\u00ae\u00a4", - "\u0120supervised", - "\u0120jewe", - "spl", - "\u0120\u00d1\u0128\u00d0\u00b5\u00d0\u00bd\u00d1\u0124\u00d1\u0122", - "\u0120collisions", - "\u00d9\u0126\u00d9\u0123", - "\u0120Hogwarts", - "\u0120Durham", - "\u00d7\u0137\u00d7\u00a3", - "\u0120phosphate", - "\u0120oversee", - "\u0120inspections", - "\u0120brinc", - "\u0120Zak", - "\u0120payoff", - "\u0120chaud", - "\u0120Hunger", - "\u00c3\u00a3os", - "vir", - "\u0120fiance", - "\u0120boug", - "lived", - "cry", - "\u00e5\u013d\u0140\u00e4\u00be\u0128", - "\u0120jointly", - "\u0120girlfriends", - "\u0120Nexus", - "\u00a6\u00ac\u00ea\u00b2\u0142\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120Kwang", - "\u00e5\u0135\u012a\u00e5\u013d\u012b", - "\u00e5\u00a7\u0133", - "\u00c5\u0124\u00c4\u013b", - "\u0120Neden", - "iece", - "\u0120inserting", - "\u00e6\u0141\u0135", - "\u0120Mummy", - "\u0120Globe", - "\u0120lee", - "\u0120german", - "\u0120creams", - "acho", - "\u0120ch\u00c6\u00b0a", - "\u0120Galile", - "\u0120f\u00c3\u00bcrs", - "\u0120estiver", - "cidos", - "Christian", - "\u0120lorsqu", - "\u0120cutest", - "vale", - "\u0120\u00d0\u00ba\u00d1\u0122\u00d0\u00b5\u00d0\u00bf", - "\u0120wary", - "\u0120slicing", - "\u0120esperando", - "\u0120Vander", - "\u0120Deixa", - "\u01201954", - "\u0120m\u00c3\u00b3wi\u00c4\u0127", - "\u00d1\u0138\u00d1\u0136", - "\u0120tooling", - "\u0120restor", - "\u0120posici\u00c3\u00b3n", - "\u0120intentar", - "\u0120Apache", - "OUL", - "\u0120\u00d9\u012a\u00d8\u00a8", - "\u0120mati\u00c3\u00a8re", - "\u00e3\u0125\u00bc\u00e3\u0124\u0135", - "\u0120linen", - "\u0120estrat\u00c3\u00a9g", - "\u0120Mutta", - "\u00e9\u00a1\u00af", - "\u00e8\u00a1\u012e\u00e4\u00ba\u0128", - "\u0120parting", - "\u0120minimizing", - "\u0120apprendre", - "\u00e6\u013e\u013f", - "\u0120\u00d0\u00b0\u00d0\u00bd\u00d0\u00b3\u00d0\u00bb\u00d0\u00b8\u00d0\u00b9", - "\u0120Doo", - "\u0120Firefox", - "c\u00c3\u00b3mo", - "\u0120geopolit", - "\u0120makan", - "\u0120mogelijk", - "\u0120\u00cf\u0122\u00ce\u00b5\u00cf\u0123\u00ce\u00b9", - "\u0120c\u00e1\u00bb\u00a9", - "\u0120installer", - "\u0120dibuj", - "\u0120Heath", - "loop", - "\u0120Broken", - "HYUN", - "shelf", - "\u0120fizer", - "\u0120enhances", - "\u00e4\u00be\u012d\u00e3\u0123\u012a\u00e3\u0123\u00b0", - "\u0120\u00d0\u00b4\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00b8", - "\u0120PUB", - "\u0120Kollegin", - "\u0120attained", - "\u00c4\u00be", - "\u0120mistress", - "\u0120Oftentimes", - "\u00d7\u0140\u00d7\u013b\u00d7\u013f", - "\u0120bewe", - "\u0120Sora", - "rauen", - "baum", - "\u0120rollers", - "\u0120mering", - "\u0120PAC", - "\u0120\u00d0\u00bd\u00d1\u0138", - "\u0120R\u00c3\u00a9publique", - "\u0120\u00d1\u0124\u00d1\u0122\u00d0\u00b0\u00d0\u00b2", - "\u0120Vanguard", - "uciones", - "\u0120\u00eb\u00ac\u00b4\u00eb\u012e\u0122", - "\u0120gour", - "\u00af\u00a4", - "\u0120\u00cf\u012b", - "\u0120sauna", - "\u0120peine", - "\u0120Valerie", - "\u0120Sikh", - "fendimiz", - "bero", - "\u0120\u00d1\u0129\u00d0\u00b8", - "\u0120do\u00c5\u013dwiad", - "\u0120Euros", - "\u0120commentaires", - "\u0120tweaks", - "\u0120Faster", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d1\u0123\u00d0\u00ba", - "\u0120progressively", - "\u0120Euch", - "boro", - 
"\u0120Ingred", - "Cap", - "\u0120uncheck", - "\u0120\u00ec\u013a\u00a4\u00eb\u00a5\u00b8", - "\u0120wre", - "\u0120FT", - "\u00c3\u00b6rung", - "\u0120memorized", - "\u0120Dinner", - "\u0120Phew", - "oubl", - "\u0120puta", - "\u0120admits", - "\u00d0\u00b5\u00d0\u00b7\u00d0\u00b4\u00d0\u00b5", - "opod", - "\u0120panda", - "\u0120hinges", - "cipe", - "\u0120transact", - "\u0120podia", - "\u0120pics", - "\u0120criterion", - "\u0120Orchestra", - "\u0120Blog", - "\u0120solemn", - "\u0120Pixar", - "Three", - "\u0120\u00d0\u00b2\u00d0\u00bd\u00d0\u00b8\u00d0\u00b7", - "\u0120Volunte", - "\u0120Savage", - "\u0120PVC", - "\u0120Caf", - "\u0120wykon", - "\u0120graders", - "\u0120crouch", - "\u0120cliche", - "\u0120soybeans", - "\u0120MUR", - "\u0120Gonzalez", - "\u0120Mimi", - "\u0120Bolsonaro", - "\u0120diaphrag", - "\u0120bilang", - "\u00eb\u0132\u013a\u00eb\u012c\u0136", - "\u00e9\u0124\u00a3\u00e6\u012a\u0133\u00e5\u0122\u0133", - "\u0120regulating", - "Mc", - "Judge", - "\u0120\u00d0\u00bd\u00d0\u00be\u00d0\u00b6", - "\u0120jak\u00c4\u0127", - "itesse", - "\u0120Wij", - "\u0120lata", - "groaning", - "POSING", - "\u0120\u00d7\u0132\u00d7\u0137\u00d7\u00aa\u00d7\u0137", - "\u0120haga", - "\u0120grounding", - "\u0120violently", - "\u0120tills", - "\u0120engag", - "\u0120Hollow", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d0\u00bf\u00d1\u0125\u00d0\u00bb\u00d1\u0131\u00d1\u0122", - "\u0120wprowad", - "\u0120replaces", - "\u0120fluorescent", - "urgical", - "iggly", - "\u0120Traditional", - "tte", - "\u0120\u00d9\u0126\u00d9\u0129", - "\u0120phosphorus", - "\u0120apron", - "\u0120Waters", - "\u0120Kultur", - "\u00d0\u00b0\u00d0\u00b2\u00d0\u00b0\u00d0\u00b9", - "\u0120olives", - "\u0120\u00d7\u0136\u00d7\u0132\u00d7\u013e", - "\u0120teilweise", - "\u0120sencill", - "\u0120prends", - "\u0120narrower", - "\u0120j\u00c3\u00a4tte", - "\u0120Informationen", - "\u00ec\u0125\u0123\u00ec\u013f\u00b4", - "\u0120starve", - "\u0120frick", - "\u0120Beweg", - "\u00e0\u00a4\u00b2", - "\u0120dolphin", - "\u0120LAUGHTER", - "\u0120INTERVIE", - "\u00e5\u0136\u012b", - "\u0120yanl\u00c4\u00b1\u00c5\u0141", - "\u0120torpedo", - "\u0120shortages", - "\u00ec\u013f\u00b4\u00eb\u0135\u013e", - "\u00c4\u00b1ld\u00c4\u00b1", - "\u0120paws", - "\u0120ozone", - "\u0120cultivated", - "\u0120Fot", - "\u0120notor", - "\u00d0\u00bd\u00d0\u00be\u00d0\u00b7", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d1\u012a", - "\u0120touchscreen", - "\u0120Ally", - "\u00e6\u013e\u0122\u00e8\u00bf\u0133", - "\u0120\u00eb\u00a7\u013d\u00ec\u0140\u012a\u00ec\u0138\u00b4\u00ec\u013c\u0136", - "\u0120\u00d0\u00a1\u00d0\u00b5\u00d1\u0122", - "\u0120\u00d0\u00b2\u00d0\u00bf\u00d0\u00be\u00d0\u00bb\u00d0\u00bd\u00d0\u00b5", - "\u0120paprika", - "\u0120Dustin", - "\u0120efecto", - "\u0120opini", - "\u0120muut", - "\u0120h\u00e1\u00bb\u012fc", - "\u0120interject", - "\u00c4\u013bt", - "\u0120butts", - "urez", - "\u0120Pike", - "\u0120Hok", - "\u0120Guinea", - "\u0120Cathedral", - "\u01201400", - "Cra", - "+,", - "\u00eb\u00a7\u013d", - "\u00b3\u00b4\u00eb\u0131\u0126\u00eb\u00a1\u013f", - "abyrin", - "\u0120videog", - "\u0120\u00d0\u00be\u00d1\u0122\u00d1\u0125\u00d0\u00b6", - "\u0120u\u00c5\u00be", - "\u0120buscando", - "\u0120Assistance", - "\u00e9\u013b\u00bd", - "\u0120melhores", - "\u00ec\u00a1\u00b4", - "\u0120\u00eb\u0123\u00bc", - "\u0120RJ", - "\u0120\u00d8\u00aa\u00d9\u0127", - "\u0120omin", - "\u0120motorcycles", - "\u0120Sapp", - "\u0120supplying", - "\u0120Algun", - "\u0120aerospace", - "\u00d7\u00a2\u00d7\u013e", - "occup", - "leist", - 
"\u0120\u00ea\u00b1\u00b0\u00eb\u012c\u0136", - "\u0120completa", - "bres", - "!(", - "\u0120\u00d0\u0141\u00d1\u0122\u00d0\u00b5\u00d0\u00b4", - "\u0120disadvantaged", - "\u0120Attend", - "\u0120Judah", - "\u00e1\u00bb\u012dch", - "ylene", - "actly", - "\u0120setups", - "\u0120ammonia", - "\u0120Schweiz", - "\u0120Shame", - "\u0120bande", - "\u0120Fuel", - "\u0120troublesome", - "\u0120numero", - "\u0120MOM", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b5\u00d0\u00b4\u00d0\u00bb\u00d0\u00b0\u00d0\u00b3", - "mentioned", - "\u0120\u00d0\u00b1\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d1\u012a\u00d0\u00be\u00d0\u00b5", - "\u0120Viktor", - "\u0120Styles", - "\u0120crucified", - "ructured", - "environ", - "\u0120morals", - "\u0120meditating", - "\u0120axial", - "isance", - "\u0120Abst", - "Green", - "\u0120\u00ea\u00b1\u00b4\u00ec", - "\u0120quadrant", - "\u0120pergi", - "\u0120cameraman", - "\u0120Sequ", - "\u0120paused", - "\u0120Laughing", - "\u00ea\u00b7\u0122", - "?..", - "\u0120\u00c5\u00bbe", - "\u0120permitir", - "\u0120detectors", - "\u0120HUD", - "aval", - "\u0120\u00ec\u0139\u00ac\u00ea\u00b8\u00b0\u00ea\u00b9\u012e\u00ec\u00a7\u0122", - "\u0120hubs", - "\u0120bestimmt", - "\u0120\u00d0\u00b1\u00d1\u0125\u00d0\u00b4\u00d0\u00b5\u00d1\u0124\u00d0\u00b5", - "INTERPOSING", - "\u0120tengan", - "\u0120crave", - "\u0120Bundesregierung", - "\u0120Bloody", - "\u0120usability", - "\u0120Eas", - "\u0120\u00c4\u0133\u00e1\u00bb\u013bng", - "\u01201955", - "\u0120kriegen", - "\u0120habitual", - "\u0120essentials", - "riminal", - "\u0120roommates", - "\u00e9\u0124\u00a3\u00e5\u00b0\u00b1", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d1\u0127\u00d0\u00be\u00d0\u00b4", - "\u0120nghi", - "\u0120mening", - "\u0120Symphony", - "\u0120Hug", - "aggi", - "\u0120wied", - "\u0120mitad", - "\u00e3\u0123\u00a3\u00e3\u0123\u00a6\u00e3\u0123\u0126\u00e3\u0123\u0128", - "teenth", - "ida\u00c4\u0129", - "Save", - "\u0120robi\u00c4\u0129", - "\u0120bounces", - "\u00b0\u0138\u00ec\u0139\u0132", - "stars", - "\u0120pragmatic", - "\u0120cognition", - "\u0120wrapper", - "\u0120warten", - "adh", - "\u0120pensa", - "\u0120Hertz", - "\u0120n\u00c4\u013d", - "\u0120Reid", - "\u0120PCs", - "\u0120Mole", - "\u0120.....", - "\u0120precio", - "\u0120Championships", - "\u00ea\u00b0\u0122\u00eb\u013f\u00bd", - "\u0120v\u00c3\u00a9r", - "\u0120corridors", - "\u0120Electronic", - "Sl", - "\u0120\u00d0\u00b0\u00d0\u00bb\u00d0\u00b5", - "\u0120overthrow", - "\u0120kabul", - "\u0120RES", - "\u0120Cyberpunk", - "\u00d0\u00be\u00d0\u00b3\u00d0\u00be\u00d0\u00b4", - "\u0120\u00d0\u013f\u00d0\u00b0\u00d0\u00b2", - "\u0120wan", - "\u0120manifestations", - "\u0120cuales", - "\u0120Wise", - "\u0120L\u00c3\u00b6sung", - "\u0120exfol", - "\u0120earns", - "\u00d1\u0125\u00d1\u0123\u00d1\u0124\u00d0\u00b8\u00d1\u0124\u00d1\u012e", - "\u0120sapp", - "\u0120Braun", - "\u0120BRANDON", - "\u00ec\u00b9\u013b", - "\u0120sano", - "\u0120FEL", - "\u00d1\u012d\u00d0\u00b2\u00d0\u00b0\u00d0\u00b9\u00d1\u0124\u00d0\u00b5\u00d1\u0123\u00d1\u012e", - "\u00d0\u00be\u00d0\u00b6\u00d0\u00b4\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8\u00d1\u0131", - "\u0120sewn", - "Fun", - "\u0120reciprocal", - "\u0120expansive", - "\u0120Traffic", - "\u0120kt\u00c3\u00b3rego", - "\u0120\u00d9\u012a\u00d8\u00b3", - "\u00e6\u013a\u00a5", - "\u0120\u00eb\u00b9\u00a8", - "prove", - "igare", - "\u0120loh", - "\u00d8\u00a7\u00d8\u00b6", - "Hope", - "\u0120devotees", - "\u0120Gom", - "\u0120steals", - "\u0120Ums", - "\u0120Twice", - "\u00e3\u0124\u00b2", - "iyim", - 
"\u0120rhythmic", - "\u0120Vorte", - "\u0120prefix", - "omination", - "\u0120dato", - "\u0120custard", - "\u0120VOICE", - "\u00e5\u00b7\u0140", - "\u0120meny", - "istors", - "\u0120\u00ed\u013a\u0133", - "\u0120\u00ec\u0124\u00b4\u00ec\u0137\u0126", - "\u0120\u00ed\u0125\u0126", - "\u0120kort", - "\u0120aba", - "\u0120Vera", - "epy", - "\u0120\u00ec\u00b9\u00b4\u00eb\u00a9\u0136\u00eb\u013f\u00bc", - "\u0120submerged", - "\u0120Clock", - "\u0120thumbnails", - "\u0120boast", - "\u0120Fare", - "!!]", - "\u0120\u00c5\u013dm", - "\u0120kaikki", - "\u0120Technologies", - "\u00ec\u013b\u00b8", - "\u00e3\u0125\u0134", - "\u00d0\u00b8\u00d1\u0124\u00d0\u00b0\u00d0\u00b9", - "\u00e5\u00b0\u0131\u00e6\u013b\u0124", - "\u0120\u00d0\u00b0\u00d1\u0124", - "\u0120knobs", - "\u0120reicht", - "\u00c6\u00b0\u00e1\u00bb\u00a3ng", - "glio", - "\u0120\u00eb\u00a7\u013d\u00ec\u013f\u00b4", - "\u00ea\u00b0\u0132\u00ec\u013f\u0126", - "\u0120jotka", - "\u0120Handy", - "\u0120Haben", - "nous", - "\u0120inland", - "\u0120amazon", - "hooting", - "SL", - "\u0120leisten", - "~\"", - "\u0120provoke", - "\u0120Twist", - "\u0120\u00d7\u0133\u00d7\u0139", - "\u0120departed", - "\u00ea\u00b0\u013e\u00eb\u00a5\u00bc", - "\u0120konse", - "\u0120Carwyn", - "\u00ed\u0137\u013a\u00ec\u012d\u0142", - "idental", - "ESCO", - "\u0120tteokbokki", - "\u0120dizendo", - "\u00e7\u00b7\u00b4", - "\u00c4\u00b1ndaki", - "imasu", - "afar", - "\u0120landfill", - "\u0120correcting", - "\u0120clears", - "\u0120Nummer", - "HAM", - "\u0120cartridges", - "\u0120Diesel", - "paced", - "\u0120obliv", - "\u0120moyens", - "\u0120Sinne", - "\u0120Preis", - "iliz", - "\u0120\u00d1\u0123\u00d0\u00bc\u00d0\u00be\u00d0\u00b6", - "\u0120broaden", - "\u00e4\u00bb\u0138\u00e6\u013a\u00af", - "xes", - "\u0120carbohydrate", - "\u00ed\u013a\u00b9", - "seok", - "\u0120echoes", - "\u0120cess", - "\u00eb\u00b0\u0136", - "\u0120\u00d0\u00b1\u00d0\u00b8\u00d0\u00b7\u00d0\u00bd\u00d0\u00b5\u00d1\u0123", - "\u0120llamado", - "\u0120essent", - "\u0120\u00ec\u013f\u00bc\u00eb\u00b0\u013a", - "\u0120Aires", - "phen", - "\u0120zebra", - "\u0120symbolism", - "Once", - "\u0120racks", - "\u0120Kafka", - "\u0120\u00d1\u0123\u00d0\u00b5\u00d1\u0122\u00d1\u012e\u00d0\u00b5\u00d0\u00b7", - "\u0120sinn", - "picious", - "kaa", - "\u0120motherfucker", - "\u0120apprenticeship", - "\u0120rpm", - "\u0120taxation", - "\u0120furry", - "\u0120Sacred", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b7\u00d0\u00bc", - "pora", - "enges", - "\u0120\u00ed\u0139\u012a\u00eb", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d0\u00bd", - "\u0120sanitizer", - "\u0120cringe", - "\u0120Sca", - "\u00d0\u00be\u00d1\u0129\u00d0\u00bd\u00d0\u00be", - "\u0120ofere", - "\u0120melodies", - "\u0120Velvet", - "\u0120Ihrer", - "\u0120Hybrid", - "\u0120Giov", - "\u0120irgendwas", - "\u0120depende", - "\u0120Users", - "\u0120hump", - "driving", - "\u0120sf", - "\u0120ruthless", - "\u00e0\u00b9\u0122\u00e0\u00b8\u0126", - "\u0120lemons", - "\u0120f\u00c3\u00b6ret", - "\u0120Oj", - "\u0120\u00d0\u00bc\u00d0\u00b0\u00d0\u00bc\u00d0\u00b0", - "\u0120interpersonal", - "\u0120gev", - "\u0120abnorm", - "\u00d0\u00b8\u00d1\u0123\u00d0\u00bb", - "\u0120\u00d0\u00b8\u00d0\u00bd\u00d0\u00b4", - "\u0120kontroll", - "\u0120regres", - "\u0120ledge", - "\u0120erz\u00c3\u00a4hlt", - "\u0120Tact", - "\u0120arriv\u00c3\u00a9", - "\u0120substantive", - "\u0120spoonful", - "zwischen", - "ooooo", - "\u0120contenido", - "\u0120besl", - "\u00e1\u00bb\u0125m", - "kten", - "Jamie", - "\u0120sandy", - "\u00e4\u00b8\u012f\u00e5\u0132\u012e", - 
"\u00e2\u012d", - "\u0120pase", - "\u0120dette", - "\u0120Belgian", - "\u00ea\u00b0\u013e\u00eb", - "ulares", - "rud", - "igor", - "\u0120\u00ed\u012e\u00ac\u00eb", - "\u0120remedies", - "\u0120blasting", - "\u0120Sich", - "\u0120\u00d0\u00be\u00d0\u00b6\u00d0\u00b8\u00d0\u00b4", - "\u0120monstr", - "\u0120manifold", - "\u0120glauben", - "\u0120EST", - "\u0120streamline", - "\u0120lobbying", - "\u0120Gothic", - "toire", - "..'", - "\u0120d\u00c3\u00a9mocr", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d0\u00b1\u00d0\u00bb\u00d1\u0130\u00d0\u00b4", - "\u0120wsp\u00c3\u00b3l", - "\u0120cz\u00c4\u013b\u00c5\u013d\u00c4\u0129", - "\u00e4\u00b8\u012d\u00e9\u013f\u00a2", - "is\u00c3\u00a9s", - "gangen", - "\u0120bezpie", - "remlin", - "\u00ea\u00b0\u013f", - "Still", - "\u0120resides", - "\u0120gelecek", - "\u0120t\u00c3\u00a9l\u00c3\u00a9phone", - "\u0120pewn", - "\u0120leopard", - "\u0120complimentary", - "\u0120crib", - "\u0120Animals", - "\u0120geil", - "essel", - "\u0120garder", - "\u0120catchy", - "\u00e6\u00a8\u00b9", - "\u0120Ets", - "\u0120Commercial", - "\u0120DENNIS", - "\u0120Coordinator", - "\u0120Abigail", - "ffffff", - "\u00e1\u00ba\u00a5p", - "\u0120peque\u00c3\u00b1a", - "\u0120injections", - "cekt", - "\u0120philanthropy", - "\u0120puck", - "\u0120celebrates", - "\u0120Dunk", - "\u0120Dlatego", - "\u00e3\u0123\u00be\u00e3\u0123\u0142", - "\u00ce\u00b4\u00ce\u00ae", - "graduate", - "\u0120Mobil", - "till", - "acam", - "\u0120yolks", - "\u0120tangled", - "\u0120maniac", - "\u0120obliged", - "\u0120Laink", - "\u0120verder", - "\u0120Damon", - "\u0120mutant", - "\u0120hopping", - "\u0120reins", - "\u0120inverter", - "\u0120contempt", - "\u00d7\u0142\u00d7\u00a1", - "learning", - "Miss", - "\u0120\u00d0\u0135\u00d0\u00be\u00d1\u0123", - "\u0120Meyer", - "\u00ea\u00bb\u013a\u00ec\u0126\u013e", - "\u00e9\u00a3\u0130", - "\u00d7\u0137\u00d7\u0142\u00d7\u013b\u00d7\u013f", - "asking", - "\u0120trimming", - "\u0120treasury", - "\u0120sente", - "Aust", - "\u0120Unterst\u00c3\u00bctzung", - "\u0120Comedy", - "\u0120Anakin", - "\u00e9\u00b9", - "\u00d1\u0122\u00d1\u0125\u00d1\u0124", - "\u0120Hari", - "ographers", - "\u0120oatmeal", - "\u0120Bots", - "\u00e4\u00b8\u012f\u00e4\u00ba\u0128", - "\u0120\u00d0\u00bf\u00d0\u00b0\u00d0\u00bb\u00d1\u012e", - "\u0120acknowledgement", - "xic", - "\u0120\u00ea\u00b4\u0122\u00ec\u012d\u00ac", - "gasping", - "\u0120\u00e3\u0123\u0137", - "\u0120terrace", - "\u0120ornaments", - "\u0120MER", - "committee", - "\u0120\u00ec\u0139\u0128\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u0120rij", - "\u00e9\u00b3", - "\u00d7\u00a6\u00d7\u013f", - "leme", - "\u0120liberties", - "\u0120fellas", - "\u0120Copper", - "bench", - "\u0120Idea", - "\u00e1\u00bb\u012fn", - "\u00d1\u012a\u00d0\u00b0", - "\u0120versi\u00c3\u00b3n", - "\u00cf\u0126\u00ce\u00bf\u00cf\u012f", - "\u0120\u00d0\u013e\u00d0\u00b8", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b8\u00d0\u00bb\u00d0\u00be\u00d0\u00b6", - "\u0120boxer", - "\u0120Tanner", - "\u0120Moy", - "\u00ec\u00b9\u013a\u00eb\u012c\u0136", - "Thr", - "\u0120tinham", - "\u0120polishing", - "\u0120consequently", - "\u0120amenities", - "\u0120KI", - "\u0120GREEN", - "\u0120Frankie", - "\u00d0\u00bd\u00d0\u00b8\u00d1\u0124", - "ittel", - "\u00d1\u0123\u00d0\u00ba\u00d0\u00be\u00d0\u00b5", - "ursed", - "\u0120upbringing", - "\u0120th\u00e1\u00bb\u00a9", - "\u0120\u00ec\u012d\u013f\u00ec\u013e\u00bc\u00eb\u00a1\u013e", - "\u0120whim", - "\u0120chinese", - "confidence", - "\u0120Jeder", - 
"\u00e3\u0123\u00aa\u00e3\u0123\u00ae\u00e3\u0123\u00a7", - "ajcie", - "\u0120Tous", - "\u0120Powers", - "\u00e1\u00bb\u00aba", - "othermal", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d1\u012a\u00d0\u00b5", - "rale", - "\u00d8\u00a7\u00d8\u00ae", - "\u0120\u00ec\u00a7\u0122\u00ec\u013d\u0132", - "\u0120\u00c3\u00a9pisode", - "\u0120sulph", - "\u0120encara", - "kraft", - "alar\u00c4\u00b1", - "\u0120Comes", - "\u0120divul", - "\u0120Rudolph", - "\u0120Muse", - "\u0120utens", - "\u0120\u00ec\u0140\u0132\u00ec\u00a3\u00bc", - "\u0120pana", - "\u0120Vegeta", - "\u0120PHP", - "\u0120NSA", - "entin", - "\u0120Carnegie", - "\u00d8\u00a7\u00d9\u012c", - "i\u00c4\u013bcy", - "Harry", - "\u0120f\u00c4\u00b1r", - "\u00d0\u00a1\u00d0\u00bf", - "\u0120gladly", - "\u0120averaging", - "\u00ed\u0137\u013a\u00ea\u00b2\u0142\u00ec\u012c\u00b5\u00eb\u012d\u012a\u00eb\u012d\u00a4", - "\u00d0\u00bb\u00d1\u0131\u00d1\u0130\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "\u0120\u00d0\u013e\u00d0\u00b5\u00d0\u00bd\u00d1\u0131", - "\u0120quotation", - "rires", - "itchens", - "ayed", - "\u0120unatt", - "\u0120Perez", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00bc\u00d0\u00b5\u00d1\u0124", - "\u0120tactile", - "\u0120Euh", - "isini", - "buh", - "\u0120hat\u00c4\u00b1r", - "\u0120\u00ec\u0140\u012a\u00ec\u013e\u00bc", - "\u0120policymakers", - "\u00b3\u00b4\u00ec\u0126\u00b8\u00ec\u013c\u0136", - "ac\u00c4\u00b1", - "\u0120\u00ce\u00ba\u00ce\u00b9", - "\u0120registering", - "reto", - "\u0120Sprinkle", - "\u0120Grammy", - "axter", - "\u0120\u00d0\u00b1\u00d0\u00b8", - "\u0120sitter", - "\u0120predic", - "\u0120thinly", - "\u0120strum", - "\u0120aggrav", - "\u0120aha", - "\u00d8\u00b1\u00d8\u00ac", - "mellow", - "\u0120constante", - "\u0120Laut", - "iston", - "\u0120transitioned", - "\u0120Cambodia", - "\u00e3\u0123\u0126\u00e3\u0123\u012f\u00e3\u0123\u00be\u00e3\u0123\u013b", - "\u00e8\u00b7\u0141\u00e5\u00a4\u00a7\u00e5\u00ae\u00b6", - "arted", - "\u0120misf", - "\u0120Punkte", - "\u012e\u00eb\u0135\u0142", - "\u0120trembling", - "\u0120gespannt", - "\u0120\u00d8\u00b9\u00d9\u0126\u00d9\u012c\u00d9\u0129", - "\u0120\u00d0\u00bd\u00d0\u00b8\u00d0\u00ba\u00d0\u00b0\u00d0\u00ba\u00d0\u00b8\u00d1\u0127", - "\u0120\u00eb\u00b6\u0122\u00eb\u0135\u013e\u00eb", - "\u0120\u00d1\u0122\u00d0\u00b0\u00d0\u00b7\u00d0\u00b2\u00d0\u00b8\u00d1\u0124", - "\u0120itchy", - "\u0120ciento", - "\u0120plains", - "\u0120kittens", - "\u0120backlog", - "\u0120Presiding", - "pta", - "\u0120havoc", - "\u0120Darrin", - "\u0120\u00d0\u013d\u00d1\u0130\u00d0\u00b1", - "\u0120segregated", - "\u0120ghetto", - "\u0120erlebt", - "\u0120drugiej", - "\u0120Sixt", - "\u00e5\u0131\u0125", - "\u00e0\u00b8\u00a3\u00e0\u00b8\u00b0", - "uencia", - "\u0120\u00ed\u0137\u013a\u00ea\u00b8\u00b0", - "\u0120\u00eb\u0128\u012f", - "\u0120robi", - "\u0120pioneers", - "\u0120milliards", - "\u0120Witcher", - "\u0120\u00eb\u00ac\u00b4\u00ec\u0139\u0129", - "orro", - "mass", - "\u0120divergence", - "\u0120Rivera", - "\u0120Noodles", - "\u0120endroit", - "\u0120Kosten", - "\u0120\u00d0\u00b4\u00d1\u0122\u00d1\u0125\u00d0\u00b3\u00d0\u00b0", - "\u0120m\u00c3\u0143nimo", - "\u0120Kazakhstan", - "\u00d8\u00aa\u00d9\u0129", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00b7\u00d0\u00b4\u00d1\u0125", - "\u0120geschrieben", - "\u0120Nil", - "\u00d1\u0123\u00d0\u00ba\u00d0\u00b8", - "\u0120Fr\u00c3\u00bch", - "\u0120beverages", - "\u00e6\u00ba\u0132", - "\u0120Gon", - "\u00e6\u013a\u00a8", - "Arin", - "\u0120Intro", - "ocalyptic", - "\u0120exhaustion", - "\u0120Status", - "\u0120Battery", - 
"\u00c3\u00a9sz", - "\u00a3\u00bc\u00eb", - "airy", - "\u0120\u00eb\u00b3\u00b4\u00ec\u0139\u00ac\u00eb\u0135\u013e\u00eb", - "\u0120disparity", - "\u00d9\u012e", - "\u0120Tucson", - "\u0120brightly", - "problem", - "\u0120biomass", - "\u00e9\u013b\u012f", - "\u00a7\u012b", - "\u0120hurdle", - "\u0120wavelengths", - "\u0120<<", - "\u0120teamed", - "FFFF", - "\u0120Slim", - "omial", - "\u0120unveiled", - "\u0120Verein", - "\u00d9\u0124\u00d8\u00b7", - "estry", - "\u0120cl\u00c3\u00a1s", - "\u0120cheddar", - "\u0120accusing", - "\u0120Scientific", - "\u0120\u00d0\u00b1\u00d1\u0125\u00d0\u00b4\u00d0\u00b5", - "\u0120Cyrus", - "\u00ce\u00b5\u00cf\u0126\u00ce\u00b5", - "\u0128\u0135\u00ea\u00b3\u0142", - "\u0120\u00eb\u00b3\u0126", - "\u0120curd", - "\u0120referrals", - "shift", - "\u00e5\u012f\u0137", - "nik\u00c3\u00b3w", - "\u0120mier", - "\u0120confronting", - "\u00ea\u00b2\u0125\u00eb\u0131\u0126", - "awl", - "\u0120tryin", - "\u0120\u00ea\u00b7\u00b8\u00eb\u0140\u013a\u00ec\u013c\u0136", - "\u0120chiar", - "\u0120\u00ec\u013a\u00a4\u00eb\u012c\u013a\u00eb\u0131\u0126", - "\u00e6\u0136\u00bf\u00e6\u00b2\u00bb", - "esque", - "\u0120mismos", - "\u0120Shak", - "\u0120sociaux", - "\u0120pi\u00c5\u0141", - "\u0120ki\u00c5\u0141i", - "\u0120cyan", - "hay", - "bew", - "bod", - "\u0120\u00ce\u00b9", - "\u0120Mainly", - "\u00d1\u0130\u00d1\u0124\u00d1\u012e", - "habitude", - "\u0120\u00d1\u0123\u00d0\u00bf\u00d0\u00be\u00d0\u00ba\u00d0\u00be\u00d0\u00b9", - "\u00e8\u00b7\u0141\u00e6\u012a\u0133", - "\u0120precon", - "\u0120Mandy", - "\u00f0\u0141\u00a4\u00a3", - "illos", - "\u0120grupp", - "\u0120crumble", - "\u0120constructor", - "ervices", - "\u0120lighthouse", - "\u0120Concept", - "\u00d0\u00b0\u00d0\u00bd\u00d1\u0124\u00d0\u00b8", - "altro", - "hope", - "\u0120Alleg", - "\u00ec\u0138\u00b4\u00eb\u00a5\u00bc", - "pieces", - "ounter", - "\u0120\u00ed\u0137\u013a\u00eb\u012d\u012a\u00ea\u00b9\u012e", - "\u0120\u00ec\u013f\u00b8\u00ed\u0126\u00b0\u00eb", - "\u0120v\u00c3\u00a9ritable", - "\u0120threaded", - "blind", - "\u0124\u013a\u00eb\u013f\u00bc", - "\u0120trays", - "\u0120Edison", - "\u0120\u00c3\u0138z", - "\u0120Stevie", - "\u0120lender", - "\u0120brigade", - "\u0120deutsche", - "muffled", - "bart", - "\u0120insanity", - "\u0120savvy", - "\u0120sensational", - "\u0120derechos", - "\u0120MX", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00b5\u00d0\u00bf", - "\u0120threatens", - "\u0120realt\u00c3\u0142", - "\u0120indicative", - "\u0120chops", - "\u0120benefiting", - "\u0120Vernon", - "\u0120Strand", - "nun", - "quently", - "101", - "\u0120eel", - "\u00ec\u012a\u013b", - "rints", - "\u0120\u00d9\u0127\u00d8\u00b3", - "\u0120\u00d8\u00a8\u00d8\u00af", - "\u0120\u00d0\u00bf\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00be", - "\u0120yapm\u00c4\u00b1\u00c5\u0141", - "\u0120olmas\u00c4\u00b1", - "\u0120iedereen", - "ol\u00c3\u00a9", - "kef", - "\u0120\u00eb\u00b0\u013e\u00ec\u0125\u013f", - "\u0120rained", - "\u0120almighty", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d0\u00b4", - "\u0120CPR", - "Fre", - "\u0120inhabited", - "\u0120arbets", - "\u0120akin", - "\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d0\u00b2", - "vania", - "\u0120h\u00c3\u00a4ufig", - "\u0120Matte", - "sorry", - "Jenny", - "\u0120\u00d0\u00b3\u00d1\u0122\u00d0\u00b0\u00d0\u00b4", - "\u0120whit", - "\u0120brokers", - "\u00e5\u00af\u0141", - "\u0120hine", - "asten", - "\u0120\u00d0\u00b3\u00d1\u0122\u00d1\u0125", - "MB", - "\u0120PRI", - "Sab", - "\u0120wrestler", - "\u0120facilitating", - "\u0120ehk\u00c3\u00a4", - "\u0120Cred", - 
"\u0120127", - "\u0120nothin", - "\u0120mandated", - "\u00e5\u00af\u012e", - "\u00d1\u0125\u00d1\u0124\u00d1\u0123\u00d1\u0124\u00d0\u00b2", - "Frank", - "\u0120wors", - "\u0120dzie\u00c5\u0126", - "\u0120Underground", - "\u0120znajdu", - "\u0120B\u00c3\u00a4", - "\u0120Prinzip", - "\u00d0\u00b0\u00d1\u0124\u00d0\u00b5\u00d0\u00bb\u00d0\u00b5\u00d0\u00b9", - "\u0120veterinar", - "\u0120splendid", - "\u0120rozp", - "\u0120psychopath", - "igon", - "\u0120hops", - "\u0120c\u00e1\u00ba\u00a7n", - "\u0120Xian", - "\u0120troisi\u00c3\u00a8me", - "\u0120producto", - "\u0120de\u00c4\u0141er", - "\u0120Continuing", - "\u00d0\u00b8\u00d0\u00b2\u00d0\u00b0\u00d0\u00bb", - "c\u00c4\u00b1k", - "\u0120moisturizer", - "White", - "\u0120siis", - "\u0120Everest", - "ienced", - "\u0120c\u00e1\u00ba\u00a3m", - "\u0120Japon", - "\u00b4\u00ec\u0142\u0126", - "\u0120ten\u00c3\u0143an", - "\u0120encanta", - "Mm", - "\u0120dropdown", - "\u0120Iya", - "\u00b3\u00b4\u00eb\u00a9\u00b4", - "\u0120wording", - "\u0120Squeeze", - "\u0120Maple", - "\u0120clarified", - "\u0120Municip", - "\u0120Rouge", - "\u0120Nicki", - "\u0120Goo", - "volt", - "tek", - "fecture", - "fred", - "arrive", - "\u00e3\u0125\u00bc\u00e3\u0123\u0126", - "tez", - "Ep", - "\u0120obras", - "\u0120VID", - "\u0120Riv", - "\u0120Modi", - "ibe", - "\u0120acontecendo", - "\u0120imitation", - "\u0120camouflage", - "\u0120spanning", - "\u0120SECRET", - "\u0120Oreo", - "\u00ec\u0128\u012e\u00eb\u00a6\u00ac", - "\u0120hunch", - "\u0120ca\u00c5\u0124e", - "\u0120spontaneously", - "\u0120Perd", - "\u0120etap", - "\u0120Hole", - "\u0120Disability", - "\u0120afterlife", - "\u00e6\u0123\u00a9", - "\u0120testified", - "\u0120presup", - "\u0120petroleum", - "\u0120contrario", - "\u0120Assessment", - "\u00c4\u0141lu", - "\u0120pests", - "\u0120dilig", - "\u0120\u00d0\u00b2\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00b5\u00d1\u0124", - "\u0120cons\u00c3\u00a9qu", - "\u0120cannons", - "\u0120canoe", - "\u0120Mile", - "\u0120citoy", - "\u0120begged", - "\u0120Minnie", - "\u00c5\u0124ych", - "\u0120principe", - "\u00cf\u0122\u00cf\u012e\u00ce\u00bd", - "mniej", - "\u0120wert", - "\u0120\u00eb\u012d\u00a4\u00eb\u0135\u00a4", - "anse", - "\u0120uncles", - "\u0120provocative", - "\u0120intersections", - "\u0120democrats", - "\u0120Julius", - "\u00d0\u00b8\u00d0\u00bd\u00d0\u00ba\u00d0\u00b8", - "ygusal", - "\u0120\u00d7\u013e\u00d7\u0137", - "\u0120gjorde", - "\u0120gasket", - "\u0120Bock", - "\u0120\u00c4\u00b0n", - "breat", - "\u0120Equity", - "ard\u00c4\u00b1", - "\u0120\u00d0\u00ba\u00d0\u00b0\u00d0\u00bd\u00d0\u00b0\u00d0\u00bb\u00d0\u00b5", - "\u0120\u00d0\u00b4\u00d0\u00bd\u00d0\u00b5\u00d0\u00b9", - "\u0120t\u00e1\u00bb\u013di", - "\u0120fixture", - "\u0120abuses", - "\u0120vaya", - "\u0120ouvert", - "\u0120multicultural", - "\u0120contexto", - "\u0120Sesame", - "\u0120d\u00c3\u00a9pl", - "\u0120consomm", - "\u0120Parte", - "\u0120pem", - "\u0120Conan", - "\u0120\u00d0\u00b1\u00d1\u0138\u00d0\u00bb\u00d1\u012e", - "\u0120persuaded", - "\u0120drains", - "Moo", - "FORE", - "\u0120\u00d0\u00b1\u00d0\u00b0\u00d1\u0124", - "\u0120fod", - "\u0120Products", - "\u00ec\u00a7\u0126\u00ec\u00a7\u013e", - "\u0120\"[", - "\u0120Wick", - "\u0120Naruto", - "\u00d0\u00bd\u00d0\u00b0\u00d0\u00bb\u00d0\u00b8", - "ryw", - "\u0120lodge", - "\u0120inh", - "\u0120vontade", - "\u0120dij", - "\u0120Jes\u00c3\u00bas", - "Looking", - "\u0120forearm", - "\u0120Integration", - "\u0120HARRIS", - "\u0120toolbar", - "leader", - "\u0120seldom", - 
"\u0120\u00d0\u00b1\u00d1\u0122\u00d0\u00be\u00d1\u0123", - "\u0120Kook", - "\u00d0\u00be\u00d0\u00bd\u00d0\u00b4", - "\u0120monopol", - "\u0120millet", - "\u0120lira", - "\u0120Asians", - "\u01201890", - "ci\u00c4\u0141im", - "\u0120eden", - "\u0120IKEA", - "\u0120Neighbor", - "\u0120Kazuya", - "\u00c3\u00bcd", - "\u0120psychedel", - "\u0120envisioned", - "\u00e5\u013f\u0139", - "\u0120\u00ef\u00b7\u00bb", - "\u0120wunder", - "\u0120Bulgaria", - "Brid", - "\u0120marrow", - "\u0120depiction", - "\u0120Tin", - "\u0120Pharise", - "\u0120einzige", - "\u0120blindly", - "\u00e3\u0123\u013d\u00e3\u0123\u00a6", - "\u0120defens", - "Dire", - "\u0120vibrating", - "\u0120trolls", - "\u0120disrespectful", - "\u0120wod", - "\u0120stimuli", - "\u0120creeping", - "\u0120clairement", - "\u0120scariest", - "\u0120d\u00c3\u00a9couvrir", - "\u0120104", - "\u0120\u00d0\u00b2\u00d0\u00b5\u00d1\u0122\u00d1\u0127", - "\u0120\u00c5\u0124at", - "\u0120r\u00c3\u00b3\u00c5\u00bcne", - "\u0120barley", - "\u0120Repl", - "\u0120Twe", - "kke", - "\u0120\u00e3\u0123\u013f\u00e3\u0124\u012e", - "\u0120Redmi", - "\u0120Metroid", - "\u0120\u00ce\u00ae\u00cf\u0126\u00ce\u00b1\u00ce\u00bd", - "Check", - "\u0120SEN", - "\u0120ido", - "\u00d1\u0124\u00d0\u00be\u00d1\u0122\u00d0\u00b8\u00d0\u00b8", - "\u00c3\u00b3p", - "UNKNOWN", - "\u0120\u00c3\u00a4ndern", - "\u0120Juice", - "\u0120Gesicht", - "\u00e5\u00b0\u00b1\u00e6\u013e\u0125", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d0\u00ba\u00d0\u00be", - "\u00ed\u0125\u0137", - "\u00c2\u0143", - "exhales", - "\u0120\u00ec\u00b4\u012b", - "\u0120jsem", - "\u00cf\u0122\u00cf\u012b\u00cf\u0124", - "\u0120itt", - "\u00eb\u00aa\u0127\u00ec\u013f\u00b4", - "\u0120remix", - "\u0120blossoms", - "\u0120Renee", - "isations", - "\u00ec\u012c\u00a4\u00ed\u0126\u00b0", - "\u0120\u00eb\u00b3\u00b4\u00ec\u013f\u00b4\u00eb\u012c\u0136", - "uestas", - "opedia", - "\u0120Aim", - "\u00ec\u013f\u00b4\u00ec\u00a6\u012a", - "scene", - "\u0120leakage", - "uckt", - "Sad", - "Ask", - "\u0120suspense", - "\u0120impost", - "\u0120Strategic", - "\u0120It\u00c3\u0143s", - "\u00e2\u0122\u012e", - "\u0120keyboards", - "\u0120amusing", - "ogr", - "iderman", - "\u0140\u0138", - "\u0120\u00d0\u00b2\u00d0\u00b8\u00d0\u00b6\u00d1\u0125", - "\u0120dips", - "\u0120apologized", - "\u0120STAR", - "\u0120escuela", - "\u0120Ching", - "\u00d0\u00bd\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8\u00d1\u0131", - "\u0120\u00eb\u00b6\u0122\u00eb\u00b6\u0126\u00ec\u013f\u00b4", - "\u0120Fleet", - "\u0120samb", - "\u0120entsprechend", - "\u0120electrodes", - "\u0120Freiheit", - "\u00e6\u012a\u0133\u00e4\u00b8\u012f\u00e7\u0141\u00a5\u00e9\u0123\u0135", - "\u0120Shrim", - "i\u00c3\u0141e", - "\u0120selections", - "\u0120fordi", - "\u0120doss", - "\u00d1\u0131\u00d1\u0129", - "\u0120discriminate", - "\u0120Au\u00c3\u0141erdem", - "\u0120desenvolv", - "\u0120Internal", - "\u0120Benedict", - "\u00e5\u00af\u0128", - "\u0120Shiv", - "Missy", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d0\u00bd\u00d0\u00b0\u00d1\u0122\u00d1\u0125\u00d0\u00b6", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00be", - "\u0120controlar", - "\u0120Lia", - "\u0120opioids", - "antu", - "\u0120cupboard", - "\u00e6\u0123\u0132", - "\u00d0\u00b3\u00d0\u00b5", - "achts", - "\u0120curated", - "\u0120xem", - "\u0120weary", - "\u0120brethren", - "\u0120budgeting", - "\u0120pourtant", - "\u00e9\u013c\u00bb", - "aisia", - "\u0120\u00d0\u00be\u00d1\u0124\u00d0\u00b2\u00d0\u00b5\u00d1\u0129", - 
"\u0120GIS", - "\u00ce\u00bc\u00ce\u00b1\u00ce\u00b9", - "\u0120\u00d7\u00a9\u00d7\u0136\u00d7\u0137\u00d7\u0132", - "\u0120saud", - "\u0120l\u00e1\u00bb\u013d", - "\u00d0\u0137\u00d0\u00a2", - "ubine", - "\u0120\u00d0\u00bd\u00d1\u0125\u00d0\u00b6\u00d0\u00b5\u00d0\u00bd", - "\u0120kidnapping", - "\u0120brat", - "\u0120Terre", - "\u0120Monet", - "\u0120\u00eb\u00a7\u012a\u00ec\u012c\u00a4\u00ed\u0123", - "\u0120flashy", - "\u0120ISBN", - "\u0120freelance", - "iage", - "\u0120junge", - "\u00ec\u00b6\u00a9", - "ceral", - "\u0120\u00d1\u0124\u00d0\u00be\u00d1\u0129\u00d0\u00ba\u00d0\u00b8", - "\u0120formulate", - "\u0120FER", - "\u0120Dartmouth", - "\u00ec\u013e\u00bc\u00eb\u00a9\u00b4\u00ec\u0126\u013e", - "\u00e5\u00a2\u0125", - "owi\u00c4\u0127", - "\u0120\u00eb\u0136\u0136\u00ec\u0140\u0132", - "\u0120regiment", - "\u0120metabolismo", - "\u0120Parr", - "\u0120\u00ec\u00b6\u00a9\u00eb\u00b6\u0126", - "\u0120sanity", - "\u0120Lal", - "\u0120G\u00c3\u00b6", - "\u0120Gla", - "\u0120proto", - "\u0120microscopic", - "\u0120kang", - "\u0120Scalia", - "\u0120pug", - "\u0120Score", - "\u0120Savannah", - "\u0120garde", - "\u0120NOR", - "\u00e5\u00b0\u012f\u00e5\u0132\u00a7", - "\u0120scheint", - "\u0120p\u00c3\u00b3\u00c5\u0124", - "\u0120corri", - "\u0120brute", - "\u0120\u00c5\u0124ad", - "\u00e4\u00bb\u0138\u00e4\u00bb\u00ac", - "\u0120succeeding", - "\u0120bicycles", - "Non", - "\u0120seekers", - "\u0120unconditional", - "\u0120rhymes", - "\u0120Garage", - "\u0120invoice", - "\u0120canvi", - "neck", - "\u0120customizable", - "iritual", - "Queen", - "\u00ed\u0137\u013a\u00ec\u012d\u013e\u00eb\u012c\u0136", - "\u0120powerless", - "\u0120csak", - "\u00e4\u00b8\u012f\u00e4\u00bc\u013c", - "isoft", - "\u0120\u00ec\u0142\u0137\u00ed\u013b\u0137", - "\u0120nh\u00c3\u00a2n", - "\u0120MAND", - "\u0120Haf", - "\u0120revolves", - "\u00e4\u00b9\u0141\u00e5\u0131\u00af\u00e4\u00bb\u00a5", - "ovan", - "aroo", - "\u0120Grind", - "\u00e9\u013d\u00aa", - "\u0120indispensable", - "\u0120consulted", - "\u0120Clinical", - "Acc", - "\u0120olhos", - "\u0120monter", - "\u0120Hana", - "etah", - "\u0120vaan", - "\u0120tigers", - "\u0120caucus", - "\u00f0\u0141\u013a\u0124", - "\u00b3\u00b4\u00ec\u0140\u0132", - "powers", - "iums", - "\u0120\u00ed\u0128\u0142\u00eb", - "\u0120tradicional", - "\u0120resonated", - "\u0120\u00ec\u012d\u0142\u00ea\u00b8\u00b0", - "them", - "Robert", - "\u0120elemento", - "\u0120antid", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u0123", - "\u0120natives", - "\u0120loca", - "owment", - "\u0120Tight", - "\u0120\u00e6\u0122\u013f", - "\u0120melan", - "\u0120Nue", - "amis", - "\u0120sorgen", - "as\u00c4\u00b1na", - "Home", - "\u0120PUBG", - "\u0120awfully", - "\u0120Shore", - "\u0120Perch\u00c3\u00a9", - "\u0120Lau", - "\u0120Cinderella", - "\u0120Chest", - "\u0120semantic", - "\u0120deserted", - "\u0120Momo", - "\u0120Hernandez", - "genes", - "\u0120Adult", - "\u00d0\u00b8\u00d1\u0129\u00d0\u00b5\u00d1\u0123\u00d0\u00ba\u00d0\u00be\u00d0\u00b3\u00d0\u00be", - "oshima", - "\u0120caracter\u00c3\u0143sticas", - "\u0120KL", - "\u00b4\u00ec\u0140\u00a5", - "ocar", - "\u0120fehlt", - "\u0120druk", - "\u0120Poppy", - "ENGLISH", - "\u0120Vergleich", - "Brien", - "\u0120recomp", - "\u0120\u00d1\u0123\u00d0\u00b4", - "\u0120merger", - "\u0120marketers", - "\u0120honeymoon", - "\u0120penso", - "\u0120belli", - "\u00d0\u00b5\u00d1\u0124\u00d1\u0125", - "\u0120banker", - "Camera", - "\u0120Stall", - "\u0120Stamp", - "\u0120Bite", - "\u00d0\u00b5\u00d0\u00b6\u00d0\u00b4\u00d0\u00b5", - "\u0120s\u00c3\u00bcr", 
- "\u0120g\u00c3\u00bc\u00c3\u00a7", - "\u0120Passover", - "\u0120Bug\u00c3\u00bcn", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b6\u00d0\u00b0\u00d0\u00bb\u00d0\u00b5\u00d0\u00bd\u00d0\u00b8\u00d1\u0130", - "\u0120\u00d0\u00bd\u00d0\u00b8\u00d0\u00b7", - "\u0120manure", - "\u0120glacier", - "\u00e8\u00ab\u0129", - "RAY", - "terror", - "\u0120salads", - "\u0120hurricanes", - "\u0120Designer", - "atorio", - "\u0120factual", - "\u0120Tammy", - "\u0120\u00d0\u00b7\u00d0\u00b2\u00d1\u0125\u00d1\u0129", - "\u0120introductions", - "\u0120housekeeping", - "\u0120hanger", - "\u00eb\u012d\u013a\u00eb", - "akte", - "\u0120Cola", - "']", - "\u0120Gender", - "\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d0\u00bd", - "ipse", - "icias", - "\u0120successive", - "\u0120politic", - "\u0120h\u00c3\u00b6her", - "\u0120Qiao", - "\u0120Gimme", - "\u0120\u00d0\u00bb\u00d0\u00be\u00d0\u00b6", - "\u0120seb", - "\u0120Weiter", - "\u0120Sakura", - "\u0120Boulder", - "\u0120Am\u00c3\u00a9rica", - "pe\u00c5\u0124nie", - "\u0120tecnolog\u00c3\u0143a", - "ishops", - "fur", - "\u0120moonlight", - "\u0120dispersed", - "\u0120rez", - "\u00d0\u00b5\u00d0\u00bd\u00d0\u00bd\u00d0\u00be\u00d0\u00b5", - "\u00d0\u00b0\u00d0\u00bb\u00d1\u012e\u00d0\u00bd\u00d1\u0125\u00d1\u0130", - "\u0120Twelve", - "\u0120HOR", - "\u00ec\u012d\u00a4\u00ed\u0140\u012a", - "ilage", - "\u0120shaded", - "\u0120resumes", - "\u0120Peanut", - "\u0120MILL", - "apons", - "\u0120UFC", - "\u0120Sole", - "\u0120joystick", - "\u0120Olivier", - "warming", - "\u0120syllabus", - "\u0120\u00d0\u00be\u00d0\u00b1\u00d1\u012b\u00d0\u00b5", - "\u0120hi\u00e1\u00bb\u0129n", - "\u0120festa", - "\u0120cradle", - "\u0120Zac", - "\u0120remembrance", - "\u0120\u00ea\u00b0\u013b\u00ec\u0137\u0126\u00ec\u0126\u013e", - "\u0120pi\u00c4\u013bk", - "\u0120coexist", - "\u0120VII", - "\u0120\u00c3\u00a1reas", - "\u0120uwa\u00c5\u00bc", - "\u0120observers", - "\u0120m\u00c3\u00a4nniskor", - "coon", - "\u0120DAM", - "\u0120naszym", - "\u0120alligator", - "\u0120Freeze", - "\u0120Estate", - "\u0120\u00d1\u0124\u00d1\u0122\u00d0\u00b0\u00d0\u00b4\u00d0\u00b8", - "\u0120undercover", - "\u0120nies", - "\u0120Fehler", - "plin", - "\u0120Kabul", - "ilate", - "\u0120\u00ea\u00b3\u0142\u00ec\u0138\u0133", - "\u0120mop", - "\u00ec\u0126\u00bc", - "\u0120anderer", - "\u0120KELL", - "\u00d0\u00be\u00d0\u00ba\u00d0\u00b8", - "\u0120\u00d0\u00b6\u00d0\u00b5\u00d1\u0123\u00d1\u0124", - "\u0120grazing", - "\u0120da\u00c3\u0143", - "\u0120capitalize", - "\u0120apex", - "\u0120nurturing", - "\u0120cortar", - "\u0120contrac", - "\u00c4\u00b1m\u00c4\u00b1z\u00c4\u00b1", - "\u0120tandem", - "\u00e9\u0125\u00bd\u00e6\u013e\u012b", - "gement", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d1\u0123\u00d1\u0124\u00d0\u00b5\u00d0\u00bc\u00d0\u00b0", - "\u0120manque", - "iaj\u00c4\u0127", - "WOR", - "\u0120\u00d8\u00a7\u00d8\u00a8", - "\u0120carts", - "ANO", - "\u0120\u00eb\u00b0\u013d\u00ea\u00b3\u0142", - "\u0120Cena", - "\u0120Biology", - "idar", - "\u0120a\u00c5\u00bc", - "erne", - "anu", - "\u0120thanked", - "\u0120submarines", - "\u0120manic", - "\u0120\u00d0\u00bc\u00d0\u00be\u00d0\u00b7", - "\u00e4\u00bc\u012c", - "instant", - "essential", - "\u0120samurai", - "\u0120pasti", - "\u0120alan", - "\u0120broch", - "\u0120baker", - "\u0120Guill", - "\u00a8\u00bc", - "\u0120withdrawn", - "\u00eb\u012d\u013f", - "Perfect", - "quency", - "\u0120streamlined", - "\u01201300", - "\u00b4\u00eb\u0131\u0126", - "\u0120\u00eb\u0138\u0142\u00eb", - "\u0120\u00e3\u0123\u00af\u00e3\u0123\u0126", - "\u0120hvad", - 
"\u00e4\u00b8\u0122\u00e5\u00ae\u013c\u00e8\u00a6\u0123", - "\u0120verbally", - "\u0120Kons", - "\u0120\u00ec\u00a1\u00b0\u00ec\u012d\u00ac", - "\u0120diez", - "\u00e6\u0130\u00b0\u00e6\u0130\u00b0", - "\u0120chuckling", - "\u0120Mih", - "\u0120rallies", - "\u0120manter", - "\u0120earnest", - "super", - "\u0120gece", - "\u0120Rend", - "\u0120Gerade", - "jenigen", - "\u0120Vall", - "\u0120\u00ec\u0140\u012a\u00eb\u0124\u013a", - "\u0120\u00d1\u0123\u00d0\u00ba\u00d0\u00b0\u00d0\u00b7\u00d0\u00b0\u00d0\u00bb\u00d0\u00b0", - "\u0120trabalh", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u012a\u00d0\u00b5\u00d0\u00bc", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d1\u0127", - "ikit", - "\u0120nouns", - "\u0120neurological", - "\u0120motivational", - "\u0120McMahon", - "\u0120Finished", - "\u0120\u00eb\u00b3\u00b4\u00ec\u013f\u00b4", - "\u0120Fields", - "\u0120adolescents", - "\u0120Tisch", - "\u0120Neben", - "\u0120Flowers", - "\u0120Energ", - "\u0120diret", - "\u0120Thi", - "\u0120Picas", - "\u00e6\u0125\u013e", - "\u00e6\u0122\u0130\u00e4\u00b9\u012a\u00e6\u0142\u00b7", - "\u0120avete", - "\u0120Fors", - "\u0120Chapel", - "N\u00c3\u00a3o", - "Et", - "\u0120\u00d1\u0123\u00d0\u00be\u00d0\u00b4\u00d0\u00b5\u00d1\u0122\u00d0\u00b6", - "reno", - "\u0120sven", - "\u0120dost\u00c4\u013bp", - "nee", - "\u0120Snapdragon", - "\u0120IDs", - "\u00ec\u0137\u013a\u00eb\u012c\u0136\u00eb\u012f\u00b0", - "\u00d7\u00a8\u00d7\u013c", - "\u0120sunflower", - "\u0120perpetual", - "\u00e7\u00b3\u0138", - "\u0120knights", - "\u0120gird", - "\u0120Told", - "\u0120volcanoes", - "\u0120adversary", - "\u0120Economy", - "\u0120extrapol", - "\u0120bluetooth", - "\u0120zooming", - "\u0120skys", - "\u0120genial", - "\u00c3\u0143culos", - "ambre", - "\u0120\u00d0\u00bc\u00d0\u00b5\u00d1\u0122", - "\u0120teeny", - "\u0120stressing", - "\u00ec\u0137\u012e", - "ONY", - "\u0120translucent", - "\u0120rounding", - "\u0120grues", - "\u00d7\u013b\u00d7\u0142\u00d7\u0136", - "apr\u00c3\u00a8s", - "\u0120prueba", - "\u0120polygon", - "\u0120blueberry", - "\u0120Programm", - "\u0120trenches", - "\u0120sebagai", - "\u0120palate", - "\u0120laude", - "\u0120behaved", - "\u0120longitudinal", - "\u0120Module", - "\u0120admir", - "\u00ce\u00bb\u00ce\u00b9", - "Greg", - "\u0120wyst", - "\u0120propagate", - "\u0120molds", - "\u0120Tub", - "\u0120Loud", - "usto", - "\u0120unstoppable", - "\u0120reinforcing", - "\u00e9\u013f\u0140\u00e5\u00b8\u00b8\u00e7\u013c\u0126", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d0\u00b1\u00d0\u00bb\u00d0\u00b5\u00d0\u00bc\u00d0\u00b0", - "\u0120potencial", - "\u0120hemp", - "\u00ec\u0140\u0136", - "\u00e0\u00a4\u00af", - "\u0120optic", - "\u0120erfolgreich", - "\u00d1\u0123\u00d1\u012d", - "\u00d0\u00be\u00d0\u00bb\u00d1\u012e\u00d1\u012a\u00d0\u00b5", - "urst", - "\u0120Pois", - "\u0120respondents", - "\u0120nehme", - "\u0120External", - "olate", - "Hyun", - "\u0120quartz", - "\u0120mathematician", - "\u0120b\u00c3\u00a1sicamente", - "\u0120ail", - "\u00ec\u0142\u013e\u00eb\u00a5\u00bc", - "attutto", - "\u0120nooit", - "\u0120afflict", - "\u0120Olga", - "\u00e8\u0143\u00b7", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u0124", - "\u0120dites", - "\u0120realidade", - "\u0120k\u00c3\u00a4n", - "\u0120uniqueness", - "\u0120padres", - "\u0120subsidi", - "\u0120pigeons", - "\u00ce\u00b2\u00ce\u00b1", - "stad", - "\u0120deren", - "\u0120\u00d0\u00a1\u00d0\u00bb\u00d0\u00b5\u00d0\u00b4", - "doo", - "\u0120\u00d0\u00be\u00d0\u00bf\u00d0\u00b8\u00d1\u0123\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b8", - "\u0120amber", - 
"\u0120goosebumps", - "\u0120fr\u00c3\u00a5gor", - "\u0120Vital", - "\u0120Israelites", - "wasser", - "Isn", - "\u0120commits", - "\u0120STEVEN", - "\u0120Bev\u00c3\u00b6lker", - "uitive", - "\u0120legen", - "\u0120bruk", - "\u00d0\u00b8\u00d1\u0122\u00d0\u00be\u00d0\u00b2\u00d0\u00b0\u00d0\u00bd", - "ynen", - "helm", - "\u0120generational", - "\u0120L\u00c3\u00a4ndern", - "\u00ce\u00bf\u00ce\u00b9\u00cf\u0122\u00cf\u012e\u00ce\u00bd", - "uzu", - "\u0120caller", - "\u00d0\u00be\u00d0\u00bd\u00d1\u012e", - "\u00c3\u00bcm\u00c3\u00bc", - "\u0120besar", - "\u0120plats", - "\u0120migrated", - "\u0120jap", - "\u0120WAR", - "\u0120dissect", - "\u0120Zusch", - "\u0120Zeiten", - "\u0120Lions", - "\u0120DF", - "\u00e2\u0136", - "\u00d0\u00ba\u00d0\u00b8\u00d0\u00b2", - "\u0120pedestrians", - "\u0120Marilyn", - "dock", - "\u0120yht", - "\u0120reincarn", - "\u0120Sono", - "\u0120Growth", - "\u00d1\u0125\u00d1\u0123\u00d0\u00be\u00d0\u00b2", - "\u0120dungeons", - "\u0120bagus", - "kich", - "\u0120\u00d1\u0125\u00d0\u00ba\u00d1\u0122\u00d0\u00b0\u00d1\u0139", - "\u00e9\u0128\u00ab", - "\u0120Keller", - "chemistry", - "Japanese", - "\u0120willst", - "\u0120decomposition", - "\u0120\u00d1\u0123\u00d1\u0124\u00d0\u00b5\u00d0\u00bd", - "\u0120revived", - "\u00ed\u0137\u013b\u00ea\u00b5\u0132", - "\u0120\u00c5\u0135", - "\u00e4\u00bd\u0132", - "\u00ec\u012d\u00b8", - "ippy", - "\u0120hourly", - "j\u00c3\u00a4n", - "\u0120Workshop", - "\u013f\u00bc\u00ec\u0126\u013e", - "\u0120cuarto", - "\u0120patrim", - "\u0120Burch", - "\u0120\u00ec\u0140\u012a\u00ea\u00b8\u00b0", - "\u0120hepat", - "\u0120h\u00c3\u0142ng", - "\u0120\u00eb\u012e\u0122\u00ed\u0137\u00b4", - "\u0120\u00d0\u00b2\u00d0\u00b0\u00d1\u012a\u00d0\u00b8", - "\u0120rework", - "\u0120parse", - "\u0120\u00c3\u00a7\u00c4\u00b1kt\u00c4\u00b1", - "\u0120Sax", - "\u0120Mongo", - "\u0120Aaah", - "ramble", - "DJ", - "\u0120stabilized", - "\u0120Speech", - "Books", - "\u0120hurdles", - "\u0120WO", - "\u0120Lamborg", - "\u01201933", - "\u0120vorbere", - "\u0120clinically", - "\u0120breathtaking", - "\u0120Gateway", - "\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d0\u00b2\u00d1\u012d\u00d1\u0127", - "uters", - "\u0120\u00eb\u00b9\u00b5", - "\u0120yeter", - "\u0120pulley", - "\u0120muffin", - "\u0120Prefer", - "\u0120Pence", - "\u0120informa\u00c3\u00a7\u00c3\u00a3o", - "\u00ec\u012c\u00a4\u00ed\u012c\u00b8\u00eb", - "\u00e3\u0124\u00b8\u00e3\u0125\u00a3", - "\u0120Turtle", - "\u0120Regina", - "\u0120Load", - "does", - "panze", - "\u00b8\u0136", - "\u0120mina", - "\u0120Latinos", - "ammers", - "\u0120Tort", - "\u0120Beyonce", - "\u00d0\u00b8\u00d0\u00bc\u00d0\u00be\u00d1\u0123\u00d1\u0124\u00d0\u00b8", - "\u0120\u00d0\u00b2\u00d0\u00be\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u0123\u00d1\u012d", - "\u0120bulun", - "\u00e8\u0122\u012e\u00e5\u00b7\u00b2", - "inek", - "bereich", - "\u0120pasture", - "\u0120OA", - "\u0120Melt", - "\u0120Ett", - "\u0120DY", - "\u0120obwohl", - "\u0120leagues", - "\u00d1\u0124\u00d0\u00b5\u00d1\u0123\u00d1\u012e", - "\u0120\u00d0\u00ba\u00d1\u0125\u00d1\u0123", - "\u0120vors", - "\u0120topp", - "ographical", - "asst", - "\u0120lindo", - "\u0120\u00eb\u00b0\u013f\u00ed\u013a\u0136", - "\u0120r\u00c3\u00a9fl", - "\u0120climbs", - "\u0120varsa", - "\u0120methyl", - "\u0120Karere", - "\u00c6\u00b0\u00e1\u00bb\u0141", - "Rad", - "\u0120preparedness", - "\u00d0\u00be\u00d0\u00bd\u00d1\u0129", - "\u0120OD", - "\u0120CGI", - "\u0120\u00e0\u00a4\u00ae", - "\u0120speechless", - "\u0120lasci", - "\u0120bolag", - 
"\u0120\u00d1\u0127\u00d0\u00be\u00d1\u0129\u00d0\u00b5\u00d1\u0124\u00d1\u0123\u00d1\u0131", - "\u0120grieving", - "\u0120Johannes", - "\u0120Carroll", - "adaki", - "\u012a\u00ac\u00eb", - "\u0120s\u00c5\u0124u", - "\u0120innerhalb", - "\u0120gymnastics", - "\u00d0\u00bf\u00d1\u0122\u00d0\u00b8", - "ifiques", - "\u0120karate", - "\u0120domu", - "\u00e3\u0123\u013f\u00e3\u0124\u012e\u00e3\u0123\u00a7", - "OTHER", - "\u0120demand\u00c3\u00a9", - "\u0120booklet", - "\u0120Kyoto", - "\u0120woh", - "\u0120Mar\u00c3\u0143a", - "violent", - "JE", - "\u0120l\u00c3\u00b3g", - "\u0120brutally", - "cot", - "\u0120\u00d9\u0127\u00db\u012e", - "\u0120Warsz", - "\u00e5\u00ae\u012a", - "wol", - "\u0120mik\u00c3\u00a4", - "\u0120Pronounce", - "\u0120Brendan", - "\u0120roup", - "\u0120italiano", - "\u00e5\u00a6\u0124\u00e6\u0143\u00a4", - "\u0120\u00d0\u00ba\u00d0\u00be\u00d0\u00bc\u00d0\u00bf\u00d1\u012e\u00d1\u0130\u00d1\u0124", - "\u0120urging", - "edes", - "\u0120carbono", - "\u0120Richardson", - "\u0120\u00d0\u013f\u00d0\u00b0\u00d1\u0129", - "\u0120Trainer", - "\u0120Crimea", - "\u0120diapers", - "\u0120covet", - "\u0120Mahar", - "\u0120Hutch", - "\u0120Ausw", - "berty", - "\u0120indifferent", - "\u00d0\u00ba\u00d1\u0122\u00d0\u00b5\u00d1\u0124", - "uldade", - "\u0120harms", - "\u00a2\u00d9\u0128", - "lesia", - "\u0120gio", - "\u0120Mistress", - "\u0120Knox", - "\u0120FREE", - "\u0120\u00eb\u00a3\u00a8\u00eb", - "\u0120\u00d0\u00bd\u00d0\u00b0\u00d1\u012a\u00d0\u00b0", - "\u0120invincible", - "\u0120maiden", - "\u0120Jeez", - "\u0120breve", - "pole", - "\u0120criticisms", - "\u0120Rusia", - "\u00e0\u00a4\u00ae", - "phin", - "\u0120Compare", - "\u0120BON", - "\u0120sneaking", - "\u0120Rails", - "\u0120Geral", - "\u01201953", - "Hola", - "\u0120\u00d0\u00be\u00d0\u00bf\u00d1\u012d\u00d1\u0124", - "\u0120rainforest", - "\u0120belum", - "\u0120Obi", - "\u0120ISS", - "\u00e3\u0124\u012e\u00e3\u0123\u00aa\u00e3\u0123\u0126", - "\u0120\u00d0\u00a1\u00d0\u00b2", - "\u0120blond", - "\u0120wzgl", - "\u0120powiedzia\u00c5\u0124", - "\u0120choking", - "\u0120Songs", - "\u0120Biraz", - "\u0120yells", - "\u0120stylist", - "\u00cf\u012e\u00cf\u0126\u00ce\u00b5", - "\u0120schreiben", - "\u0120Jaw", - "\u0120Eleven", - "\u0120Rif", - "/.", - "\u0120\u00ec\u013a\u00a4\u00eb\u0140\u013e\u00eb\u00a7\u012e", - "\u0120treaties", - "uffed", - "\u0120\u00e2\u012a\u0134", - "\u0120roofs", - "\u00e0\u00b9\u0122\u00e0\u00b8\u00aa", - "\u0120\u00eb\u00bb", - "\u0120sparkle", - "\u0120Kiev", - "\u0120Argu", - "erecht", - "\u0120\u00d0\u013f\u00d0\u00b0\u00d0\u00b4\u00d0\u00be", - "\u0120FIL", - "\u0120molta", - "\u0120Devi", - "\u0120campe", - "\u0120benevol", - "\u0120Tough", - "\u0120moim", - "\u0120evacuate", - "\u0120errado", - "\u00e5\u00a9\u0128", - "\u00d1\u0122\u00d1\u0125\u00d0\u00b3\u00d0\u00be", - "\u0120\u00ed\u0130\u013a", - "\u0120\u00ce\u0135\u00ce\u00b9\u00ce\u00b1", - "\u0120weaken", - "\u0120illuminated", - "\u0120siglo", - "\u0120Vacc", - "\u00d0\u00b8\u00d0\u00b5\u00d0\u00b9", - "alis", - "\u0120\u00d1\u0125\u00d1\u0123\u00d1\u0124\u00d1\u0122\u00d0\u00be\u00d0\u00b9", - "\u0120dona", - "\u00c5\u0124os", - "\u00c3\u00bcman", - "\u0120producci\u00c3\u00b3n", - "\u0120clot", - "\u0120Mango", - "\u0120uneasy", - "\u0120shuts", - "\u0120Examples", - "vell", - "ebe", - "\u0120promptly", - "\u0120Teles", - "\u0120\u00d0\u00bf\u00d1\u0122\u00d0\u00be\u00d1\u012a\u00d0\u00bb", - "\u0120puerta", - "\u0120\u00c3\u00bcberzeug", - "\u0120coch", - "social", - "\u0120Benson", - "\u0120Meth", - "\u0120Exped", - 
"\u0120supplemental", - "\u0120conceive", - "\u0120\u00d7\u013a\u00d7\u0137\u00d7\u0133", - "\u0120captivity", - "\u0131\u013b\u00ec\u0137\u012a", - "\u0120\u00d1\u0127\u00d1\u0125\u00d0\u00b4", - "forming", - "\u0120uploads", - "\u0120turbulence", - "joint", - "\u0120satisfactory", - "\u0120Anime", - "\u0120washes", - "\u0120liberals", - "\u0120Sunshine", - "\u0120REAL", - "ublik", - "binary", - "Tony", - "\u0120polarized", - "\u0120enriched", - "taking", - "\u0120\u00eb\u0123\u013f\u00eb\u0124\u013a", - "\u0120pleasures", - "\u0120extermin", - "inese", - "atl", - "v\u00c3\u00a4r", - "\u00d0\u00b0\u00d1\u0122\u00d1\u012d", - "\u0120my\u00c5\u013d", - "narrator", - "\u0120\u00d0\u00be\u00d0\u00b4\u00d0\u00bd\u00d0\u00be\u00d0\u00bc", - "\u0120najwi\u00c4\u013b", - "\u0120mobilize", - "\u0120millor", - "\u0120ata", - "\u00e6\u00b7\u00b7", - "\u0120pol\u00c3\u0143tico", - "\u0120plead", - "\u0120painters", - "\u0120Sow", - "\u00d0\u00be\u00d1\u0126", - "\u0120\u00ec\u013a\u013d\u00eb\u0124\u0142", - "\u0120\u00d1\u0129\u00d1\u0124\u00d0\u00be\u00d0\u00b1", - "\u0120sabor", - "\u0120Undert", - "\u0120JERRY", - "\u00c5\u00a1\u00c3\u0143", - "\u0120\u00eb\u00b0\u0138\u00ec\u0139\u0132", - "\u0120pr\u00c3\u00a9c\u00c3\u00a9d", - "\u0120annotation", - "\u0120Inaudible", - "\u0120textured", - "\u0120fisherman", - "vordan", - "icherung", - "\u0120\u00ec\u0142\u0123\u00ec\u013f\u00b4", - "\u0120gezeigt", - "\u0120mandates", - "\u0120beak", - "\u0120TWO", - "\u0120Akbar", - "ilian", - "\u0120ti\u00e1\u00ba\u00bfp", - "\u0120superiority", - "inku", - "\u0120lys", - "\u0120FCC", - "\u0120CPA", - "ustering", - "nicos", - "anja", - "\u0120chills", - "\u0120Cage", - "\u0120sealing", - "\u0120sa\u00c3\u00a7", - "\u0120dedans", - "\u0120Alger", - "\u0120spezie", - "\u0120coloss", - "\u00c4\u00b1y\u00c4\u00b1", - "clockwise", - "\u0120exactamente", - "\u0120iemand", - "am\u00c4\u00b1", - "\u0120mandar", - "raj", - "faced", - "agua", - "\u0120\u00ea\u00b9\u0136\u00eb", - "\u0120insbesondere", - "\u0120drizzle", - "\u0120diminish", - "\u0120Yoda", - "AI", - "\u0120bilmiyorum", - "\u0120MMA", - "ategory", - "\u0120\u00d0\u00bf\u00d0\u00b5\u00d1\u0122\u00d0\u00b5\u00d0\u00bf", - "\u0120participar", - "\u0120normalized", - "\u0120complexities", - "\u00e6\u00b4\u00b2", - "\u00e6\u0130\u00a7", - "\u00d0\u00b0\u00d1\u0122\u00d0\u00be\u00d0\u00b2", - "mist", - "icha", - "Group", - "\u0120resiliency", - "\u0120nogle", - "\u0120CNC", - "pr\u00c3\u00bc", - "\u0120physicists", - "\u00d0\u00bd\u00d0\u00be\u00d0\u00ba", - "LI", - "\u0120stuffs", - "\u0120sistemas", - "\u0120interfering", - "\u0120Marvin", - "\u00c3\u00a9rcito", - "\u0120\u00ec\u0139\u0128\u00ea\u00b3\u0142", - "\u0120sonic", - "\u0120equiv", - "\u0120abord", - "\u0120Ramen", - "\u012009", - "medim", - "atiques", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d0\u00bb\u00d0\u00b0\u00d1\u0130\u00d1\u0124", - "\u0120unanimously", - "\u0120skirts", - "\u0120\u00ed\u012c\u00b9\u00eb\u00b3\u0126", - "\u0120Prix", - "kami", - "\u0120fruition", - "\u0120birthdays", - "\u00d0\u00b8\u00d0\u00ba\u00d0\u00be\u00d0\u00bc", - "\u0120inaugural", - "\u0120correlate", - "\u0120Tory", - "\u0120\u00eb\u0124\u013a\u00ec\u0123", - "\u0120dew", - "\u0120Precis", - "ihi", - "\u0120\u00eb\u00ac\u00b8\u00ec\u0142\u013e\u00ea\u00b0\u0122", - "\u0120citing", - "\u0120Lana", - "\u0120Kag", - "\u0120playthrough", - "\u0120Protocol", - "frist", - "hovah", - "\u0120merciful", - "\u0120bilingual", - "\u0120Guitar", - "rh", - "\u0120glamorous", - "\u0120Vikings", - "\u0120Ooooh", - 
"\u00ed\u0137\u013a\u00eb\u012c\u0136\u00eb\u012f\u00b0", - "\u0120Uganda", - "\u0120collapses", - "entry", - "\u0120antioxidants", - "\u00eb\u0124\u013a\u00eb", - "\u00d1\u012a\u00d0\u00b0\u00d1\u0131", - "\u0120trivia", - "\u0120g\u00c3\u00a4ller", - "\u0120fungi", - "\u0120milks", - "\u0120dicht", - "\u00ce\u00bc\u00ce\u00b7", - "poke", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d0\u00bf\u00d1\u0125\u00d1\u0123\u00d0\u00ba", - "\u0120feeder", - "\u0120Alcohol", - "hower", - "\u0120deserving", - "\u0120Rebel", - "iosis", - "\u0120103", - "\u0120handout", - "\u0120enm", - "\u0120landlords", - "\u0120geology", - "rils", - "\u0120cobra", - "\u0120Vold", - "\u0120Panch", - "\u0120GREG", - "\u0120pross", - "\u0120bracelets", - "\u0120Vega", - "\u0120rozum", - "\u00e6\u00ac\u00be", - "\u00d0\u00b0\u00d0\u00b7\u00d0\u00b4", - "\u0120Lynd", - "\u0120Honors", - "\u0120surrendered", - "\u0120librarians", - "125", - "\u0120\u00d1\u0123\u00d0\u00b8\u00d0\u00b3", - "\u0120uniformly", - "\u0120Eagles", - "\u00ec\u0137\u013b", - "\u00d0\u00b8\u00d1\u0124\u00d0\u00b0\u00d0\u00bd", - "andid", - "\u0120\u00ec\u0142\u012a\u00eb\u012e\u0122", - "\u0120\u00d8\u00b6", - "\u0120arrests", - "\u0120CSV", - "\u0120Azerbaijan", - "ortic", - "\u0120DX", - "\u0120Adventures", - "\u0120abus", - "\u0120Fau", - "\u0120schlimm", - "\u0120rattling", - "\u0120consumes", - "\u0120Tolkien", - "\u0120resurrected", - "\u0120XY", - "\u00ed\u012c\u00b8\u00ea\u00b0\u0122", - "\u0120\u00d0\u00b2\u00d1\u012d\u00d1\u0123\u00d1\u0124\u00d1\u0125\u00d0\u00bf", - "\u0120Angie", - "\u00c5\u00bcenia", - "Mic", - "\u0120Sheila", - "achtet", - "\u0120overst", - "\u0120l\u00c3\u00a2", - "\u0120ineffective", - "\u00e6\u013f\u00a1", - "\u00e6\u0122\u0130\u00e4\u00b9\u012a\u00e4\u00ba\u0128", - "\u00e5\u00bf\u013b", - "\u0120wichtiger", - "\u0120vino", - "\u0120pum", - "\u0120angled", - "\u0120Pione", - "\u0120M\u00e1\u00bb\u00b9", - "\u00e3\u0123\u013f\u00e3\u0124\u012e\u00e3\u0123\u00af", - "wo\u00c5\u013d\u00c4\u0129", - "draw", - "\u00e0\u00b8\u00b1\u00e0\u00b9\u012a", - "markets", - "\u0120cafes", - "\u0120Cem", - "\u00e2\u013f\u00a4", - "\u0120Suit", - "MK", - "\u0120emphasizes", - "\u0120tortilla", - "\u0120mejorar", - "\u0120Surviv", - "casting", - "\u0120educaci\u00c3\u00b3n", - "\u0120Gum", - "uely", - "\u0120\u00ec\u0139\u00ac\u00ea\u00b8\u00b0\u00eb\u012c\u0136", - "\u0120stretchy", - "en\u00c3\u00a7a", - "\u0120withhold", - "\u0120exiting", - "\u0120enthalpy", - "\u0120Transit", - "\u00c4\u00b1lm\u00c4\u00b1\u00c5\u0141", - "alies", - "\u0120salvar", - "\u0120leaned", - "\u0120gro\u00c3\u0141es", - "\u0120fitt", - "\u00d0\u00b0\u00d0\u00ba\u00d0\u00b8", - "Sarah", - "\u0120hostel", - "\u0120fingerna", - "\u0120nadziej\u00c4\u013b", - "wives", - "Rec", - "\u0120spool", - "\u00d0\u00b0\u00d1\u0124\u00d0\u00be\u00d0\u00b2", - "\u0120Enemy", - "\u0120fury", - "\u0120detta", - "\u0120Fay", - "\u00e9\u013c\u00a8", - "\u00d1\u0131\u00d1\u0130\u00d1\u0124", - "\u0120aproximadamente", - "\u0120silos", - "\u0120magist", - "\u0120cree", - "\u0120Krank", - "\u0120DOWN", - "\u0120startled", - "\u0120reborn", - "\u0120Umwelt", - "\u0120Suzanne", - "\u00d0\u00bd\u00d0\u00b8\u00d1\u0128\u00d1\u012d", - "outez", - "\u0120JAC", - "yards", - "radas", - "rau", - "ipts", - "hail", - "\u0120paragraphs", - "\u0120meglio", - "\u0120isolating", - "\u0120aceite", - "\u0120Harsh", - "\u0120cyst", - "\u0120Blockchain", - "\u0120\u00d1\u0127\u00d0\u00be\u00d1\u0122\u00d0\u00be\u00d1\u012a\u00d0\u00b8\u00d0\u00b9", - "\u0120virtuous", - 
"\u0120investigaci\u00c3\u00b3n", - "\u0120devoir", - "\u0120masturb", - "\u0120Sale", - "\u00d9\u012c\u00d8\u00b1\u00d8\u00a9", - "\u0120\u00ce\u00a7", - "\u0120Stra\u00c3\u0141en", - "\u0120dikk", - "\u0120afore", - "\u0120Jungkook", - "\u0120chocia\u00c5\u00bc", - "\u0120Debatte", - "\u0120weirdly", - "\u0120viaje", - "regist", - "Help", - "\u0120kinderen", - "\u0120formulated", - "\u0120enfim", - "\u0120Towards", - "\u00d0\u00ba\u00d0\u00be\u00d1\u0139", - "ivering", - "\u0120\u00d0\u00b4\u00d0\u00b5\u00d1\u0124\u00d0\u00b8", - "charger", - "\u0120purl", - "\u0120academically", - "\u0120Nurse", - "\u0120deleting", - "ayo", - "\u0120refusal", - "\u0120depicts", - "\u0120Dracula", - "\u0120toasted", - "\u0120Zombie", - "\u0120Superior", - "\u0120Bold", - "\u0120quizzes", - "\u0120gle", - "450", - "\u0120come\u00c3\u00a7o", - "ynn", - "\u0120verst", - "\u0120Olaf", - "\u0120pomoc", - "\u0120Sask", - "\u00eb\u013a", - "\u0120TCP", - "\u0120Property", - "\u00ed\u0137\u013a\u00ec\u00a3\u0142", - "\u00e0\u00b8\u013e\u00e0\u00b8\u00a1", - "boom", - "aros", - "\u0120\u00d1\u0122\u00d0\u00be\u00d1\u0123\u00d1\u0123\u00d0\u00b8\u00d0\u00b9", - "\u0120\u00d0\u00b1\u00d1\u012d\u00d0\u00b2\u00d0\u00b0\u00d0\u00b5\u00d1\u0124", - "\u00e5\u0129\u00ba\u00e5\u0130\u00bb", - "\u0120\u00ec\u013f\u00b4\u00ec\u0137\u00bc\u00ea\u00b8\u00b0\u00eb\u00a5\u00bc", - "\u0120combien", - "vacc", - "\u0120ebenfalls", - "para", - "\u0120\u00d0\u00b7\u00d0\u00bc", - "\u0120desperation", - "ordre", - "\u0120\u00d7\u00a9\u00d7\u013e\u00d7\u013b", - "\u0120generously", - "\u0120\u00d0\u0140\u00d0\u00ba", - "\u0120orbiting", - ">", - "<|startoftranscript|>", - "<|en|>", - "<|zh|>", - "<|de|>", - "<|es|>", - "<|ru|>", - "<|ko|>", - "<|fr|>", - "<|ja|>", - "<|pt|>", - "<|tr|>", - "<|pl|>", - "<|ca|>", - "<|nl|>", - "<|ar|>", - "<|sv|>", - "<|it|>", - "<|id|>", - "<|hi|>", - "<|fi|>", - "<|vi|>", - "<|he|>", - "<|uk|>", - "<|el|>", - "<|ms|>", - "<|cs|>", - "<|ro|>", - "<|da|>", - "<|hu|>", - "<|ta|>", - "<|no|>", - "<|th|>", - "<|ur|>", - "<|hr|>", - "<|bg|>", - "<|lt|>", - "<|la|>", - "<|mi|>", - "<|ml|>", - "<|cy|>", - "<|sk|>", - "<|te|>", - "<|fa|>", - "<|lv|>", - "<|bn|>", - "<|sr|>", - "<|az|>", - "<|sl|>", - "<|kn|>", - "<|et|>", - "<|mk|>", - "<|br|>", - "<|eu|>", - "<|is|>", - "<|hy|>", - "<|ne|>", - "<|mn|>", - "<|bs|>", - "<|kk|>", - "<|sq|>", - "<|sw|>", - "<|gl|>", - "<|mr|>", - "<|pa|>", - "<|si|>", - "<|km|>", - "<|sn|>", - "<|yo|>", - "<|so|>", - "<|af|>", - "<|oc|>", - "<|ka|>", - "<|be|>", - "<|tg|>", - "<|sd|>", - "<|gu|>", - "<|am|>", - "<|yi|>", - "<|lo|>", - "<|uz|>", - "<|fo|>", - "<|ht|>", - "<|ps|>", - "<|tk|>", - "<|nn|>", - "<|mt|>", - "<|sa|>", - "<|lb|>", - "<|my|>", - "<|bo|>", - "<|tl|>", - "<|mg|>", - "<|as|>", - "<|tt|>", - "<|haw|>", - "<|ln|>", - "<|ha|>", - "<|ba|>", - "<|jw|>", - "<|su|>", - "<|yue|>", - "<|translate|>", - "<|transcribe|>", - "<|startoflm|>", - "<|startofprev|>", - "<|nospeech|>", - "<|notimestamps|>", - "<|0.00|>", - "<|0.02|>", - "<|0.04|>", - "<|0.06|>", - "<|0.08|>", - "<|0.10|>", - "<|0.12|>", - "<|0.14|>", - "<|0.16|>", - "<|0.18|>", - "<|0.20|>", - "<|0.22|>", - "<|0.24|>", - "<|0.26|>", - "<|0.28|>", - "<|0.30|>", - "<|0.32|>", - "<|0.34|>", - "<|0.36|>", - "<|0.38|>", - "<|0.40|>", - "<|0.42|>", - "<|0.44|>", - "<|0.46|>", - "<|0.48|>", - "<|0.50|>", - "<|0.52|>", - "<|0.54|>", - "<|0.56|>", - "<|0.58|>", - "<|0.60|>", - "<|0.62|>", - "<|0.64|>", - "<|0.66|>", - "<|0.68|>", - "<|0.70|>", - "<|0.72|>", - "<|0.74|>", - "<|0.76|>", - "<|0.78|>", - "<|0.80|>", - 
"<|0.82|>", - "<|0.84|>", - "<|0.86|>", - "<|0.88|>", - "<|0.90|>", - "<|0.92|>", - "<|0.94|>", - "<|0.96|>", - "<|0.98|>", - "<|1.00|>", - "<|1.02|>", - "<|1.04|>", - "<|1.06|>", - "<|1.08|>", - "<|1.10|>", - "<|1.12|>", - "<|1.14|>", - "<|1.16|>", - "<|1.18|>", - "<|1.20|>", - "<|1.22|>", - "<|1.24|>", - "<|1.26|>", - "<|1.28|>", - "<|1.30|>", - "<|1.32|>", - "<|1.34|>", - "<|1.36|>", - "<|1.38|>", - "<|1.40|>", - "<|1.42|>", - "<|1.44|>", - "<|1.46|>", - "<|1.48|>", - "<|1.50|>", - "<|1.52|>", - "<|1.54|>", - "<|1.56|>", - "<|1.58|>", - "<|1.60|>", - "<|1.62|>", - "<|1.64|>", - "<|1.66|>", - "<|1.68|>", - "<|1.70|>", - "<|1.72|>", - "<|1.74|>", - "<|1.76|>", - "<|1.78|>", - "<|1.80|>", - "<|1.82|>", - "<|1.84|>", - "<|1.86|>", - "<|1.88|>", - "<|1.90|>", - "<|1.92|>", - "<|1.94|>", - "<|1.96|>", - "<|1.98|>", - "<|2.00|>", - "<|2.02|>", - "<|2.04|>", - "<|2.06|>", - "<|2.08|>", - "<|2.10|>", - "<|2.12|>", - "<|2.14|>", - "<|2.16|>", - "<|2.18|>", - "<|2.20|>", - "<|2.22|>", - "<|2.24|>", - "<|2.26|>", - "<|2.28|>", - "<|2.30|>", - "<|2.32|>", - "<|2.34|>", - "<|2.36|>", - "<|2.38|>", - "<|2.40|>", - "<|2.42|>", - "<|2.44|>", - "<|2.46|>", - "<|2.48|>", - "<|2.50|>", - "<|2.52|>", - "<|2.54|>", - "<|2.56|>", - "<|2.58|>", - "<|2.60|>", - "<|2.62|>", - "<|2.64|>", - "<|2.66|>", - "<|2.68|>", - "<|2.70|>", - "<|2.72|>", - "<|2.74|>", - "<|2.76|>", - "<|2.78|>", - "<|2.80|>", - "<|2.82|>", - "<|2.84|>", - "<|2.86|>", - "<|2.88|>", - "<|2.90|>", - "<|2.92|>", - "<|2.94|>", - "<|2.96|>", - "<|2.98|>", - "<|3.00|>", - "<|3.02|>", - "<|3.04|>", - "<|3.06|>", - "<|3.08|>", - "<|3.10|>", - "<|3.12|>", - "<|3.14|>", - "<|3.16|>", - "<|3.18|>", - "<|3.20|>", - "<|3.22|>", - "<|3.24|>", - "<|3.26|>", - "<|3.28|>", - "<|3.30|>", - "<|3.32|>", - "<|3.34|>", - "<|3.36|>", - "<|3.38|>", - "<|3.40|>", - "<|3.42|>", - "<|3.44|>", - "<|3.46|>", - "<|3.48|>", - "<|3.50|>", - "<|3.52|>", - "<|3.54|>", - "<|3.56|>", - "<|3.58|>", - "<|3.60|>", - "<|3.62|>", - "<|3.64|>", - "<|3.66|>", - "<|3.68|>", - "<|3.70|>", - "<|3.72|>", - "<|3.74|>", - "<|3.76|>", - "<|3.78|>", - "<|3.80|>", - "<|3.82|>", - "<|3.84|>", - "<|3.86|>", - "<|3.88|>", - "<|3.90|>", - "<|3.92|>", - "<|3.94|>", - "<|3.96|>", - "<|3.98|>", - "<|4.00|>", - "<|4.02|>", - "<|4.04|>", - "<|4.06|>", - "<|4.08|>", - "<|4.10|>", - "<|4.12|>", - "<|4.14|>", - "<|4.16|>", - "<|4.18|>", - "<|4.20|>", - "<|4.22|>", - "<|4.24|>", - "<|4.26|>", - "<|4.28|>", - "<|4.30|>", - "<|4.32|>", - "<|4.34|>", - "<|4.36|>", - "<|4.38|>", - "<|4.40|>", - "<|4.42|>", - "<|4.44|>", - "<|4.46|>", - "<|4.48|>", - "<|4.50|>", - "<|4.52|>", - "<|4.54|>", - "<|4.56|>", - "<|4.58|>", - "<|4.60|>", - "<|4.62|>", - "<|4.64|>", - "<|4.66|>", - "<|4.68|>", - "<|4.70|>", - "<|4.72|>", - "<|4.74|>", - "<|4.76|>", - "<|4.78|>", - "<|4.80|>", - "<|4.82|>", - "<|4.84|>", - "<|4.86|>", - "<|4.88|>", - "<|4.90|>", - "<|4.92|>", - "<|4.94|>", - "<|4.96|>", - "<|4.98|>", - "<|5.00|>", - "<|5.02|>", - "<|5.04|>", - "<|5.06|>", - "<|5.08|>", - "<|5.10|>", - "<|5.12|>", - "<|5.14|>", - "<|5.16|>", - "<|5.18|>", - "<|5.20|>", - "<|5.22|>", - "<|5.24|>", - "<|5.26|>", - "<|5.28|>", - "<|5.30|>", - "<|5.32|>", - "<|5.34|>", - "<|5.36|>", - "<|5.38|>", - "<|5.40|>", - "<|5.42|>", - "<|5.44|>", - "<|5.46|>", - "<|5.48|>", - "<|5.50|>", - "<|5.52|>", - "<|5.54|>", - "<|5.56|>", - "<|5.58|>", - "<|5.60|>", - "<|5.62|>", - "<|5.64|>", - "<|5.66|>", - "<|5.68|>", - "<|5.70|>", - "<|5.72|>", - "<|5.74|>", - "<|5.76|>", - "<|5.78|>", - "<|5.80|>", - "<|5.82|>", - "<|5.84|>", - "<|5.86|>", - "<|5.88|>", 
- "<|5.90|>", - "<|5.92|>", - "<|5.94|>", - "<|5.96|>", - "<|5.98|>", - "<|6.00|>", - "<|6.02|>", - "<|6.04|>", - "<|6.06|>", - "<|6.08|>", - "<|6.10|>", - "<|6.12|>", - "<|6.14|>", - "<|6.16|>", - "<|6.18|>", - "<|6.20|>", - "<|6.22|>", - "<|6.24|>", - "<|6.26|>", - "<|6.28|>", - "<|6.30|>", - "<|6.32|>", - "<|6.34|>", - "<|6.36|>", - "<|6.38|>", - "<|6.40|>", - "<|6.42|>", - "<|6.44|>", - "<|6.46|>", - "<|6.48|>", - "<|6.50|>", - "<|6.52|>", - "<|6.54|>", - "<|6.56|>", - "<|6.58|>", - "<|6.60|>", - "<|6.62|>", - "<|6.64|>", - "<|6.66|>", - "<|6.68|>", - "<|6.70|>", - "<|6.72|>", - "<|6.74|>", - "<|6.76|>", - "<|6.78|>", - "<|6.80|>", - "<|6.82|>", - "<|6.84|>", - "<|6.86|>", - "<|6.88|>", - "<|6.90|>", - "<|6.92|>", - "<|6.94|>", - "<|6.96|>", - "<|6.98|>", - "<|7.00|>", - "<|7.02|>", - "<|7.04|>", - "<|7.06|>", - "<|7.08|>", - "<|7.10|>", - "<|7.12|>", - "<|7.14|>", - "<|7.16|>", - "<|7.18|>", - "<|7.20|>", - "<|7.22|>", - "<|7.24|>", - "<|7.26|>", - "<|7.28|>", - "<|7.30|>", - "<|7.32|>", - "<|7.34|>", - "<|7.36|>", - "<|7.38|>", - "<|7.40|>", - "<|7.42|>", - "<|7.44|>", - "<|7.46|>", - "<|7.48|>", - "<|7.50|>", - "<|7.52|>", - "<|7.54|>", - "<|7.56|>", - "<|7.58|>", - "<|7.60|>", - "<|7.62|>", - "<|7.64|>", - "<|7.66|>", - "<|7.68|>", - "<|7.70|>", - "<|7.72|>", - "<|7.74|>", - "<|7.76|>", - "<|7.78|>", - "<|7.80|>", - "<|7.82|>", - "<|7.84|>", - "<|7.86|>", - "<|7.88|>", - "<|7.90|>", - "<|7.92|>", - "<|7.94|>", - "<|7.96|>", - "<|7.98|>", - "<|8.00|>", - "<|8.02|>", - "<|8.04|>", - "<|8.06|>", - "<|8.08|>", - "<|8.10|>", - "<|8.12|>", - "<|8.14|>", - "<|8.16|>", - "<|8.18|>", - "<|8.20|>", - "<|8.22|>", - "<|8.24|>", - "<|8.26|>", - "<|8.28|>", - "<|8.30|>", - "<|8.32|>", - "<|8.34|>", - "<|8.36|>", - "<|8.38|>", - "<|8.40|>", - "<|8.42|>", - "<|8.44|>", - "<|8.46|>", - "<|8.48|>", - "<|8.50|>", - "<|8.52|>", - "<|8.54|>", - "<|8.56|>", - "<|8.58|>", - "<|8.60|>", - "<|8.62|>", - "<|8.64|>", - "<|8.66|>", - "<|8.68|>", - "<|8.70|>", - "<|8.72|>", - "<|8.74|>", - "<|8.76|>", - "<|8.78|>", - "<|8.80|>", - "<|8.82|>", - "<|8.84|>", - "<|8.86|>", - "<|8.88|>", - "<|8.90|>", - "<|8.92|>", - "<|8.94|>", - "<|8.96|>", - "<|8.98|>", - "<|9.00|>", - "<|9.02|>", - "<|9.04|>", - "<|9.06|>", - "<|9.08|>", - "<|9.10|>", - "<|9.12|>", - "<|9.14|>", - "<|9.16|>", - "<|9.18|>", - "<|9.20|>", - "<|9.22|>", - "<|9.24|>", - "<|9.26|>", - "<|9.28|>", - "<|9.30|>", - "<|9.32|>", - "<|9.34|>", - "<|9.36|>", - "<|9.38|>", - "<|9.40|>", - "<|9.42|>", - "<|9.44|>", - "<|9.46|>", - "<|9.48|>", - "<|9.50|>", - "<|9.52|>", - "<|9.54|>", - "<|9.56|>", - "<|9.58|>", - "<|9.60|>", - "<|9.62|>", - "<|9.64|>", - "<|9.66|>", - "<|9.68|>", - "<|9.70|>", - "<|9.72|>", - "<|9.74|>", - "<|9.76|>", - "<|9.78|>", - "<|9.80|>", - "<|9.82|>", - "<|9.84|>", - "<|9.86|>", - "<|9.88|>", - "<|9.90|>", - "<|9.92|>", - "<|9.94|>", - "<|9.96|>", - "<|9.98|>", - "<|10.00|>", - "<|10.02|>", - "<|10.04|>", - "<|10.06|>", - "<|10.08|>", - "<|10.10|>", - "<|10.12|>", - "<|10.14|>", - "<|10.16|>", - "<|10.18|>", - "<|10.20|>", - "<|10.22|>", - "<|10.24|>", - "<|10.26|>", - "<|10.28|>", - "<|10.30|>", - "<|10.32|>", - "<|10.34|>", - "<|10.36|>", - "<|10.38|>", - "<|10.40|>", - "<|10.42|>", - "<|10.44|>", - "<|10.46|>", - "<|10.48|>", - "<|10.50|>", - "<|10.52|>", - "<|10.54|>", - "<|10.56|>", - "<|10.58|>", - "<|10.60|>", - "<|10.62|>", - "<|10.64|>", - "<|10.66|>", - "<|10.68|>", - "<|10.70|>", - "<|10.72|>", - "<|10.74|>", - "<|10.76|>", - "<|10.78|>", - "<|10.80|>", - "<|10.82|>", - "<|10.84|>", - "<|10.86|>", - "<|10.88|>", - 
"<|10.90|>", - "<|10.92|>", - "<|10.94|>", - "<|10.96|>", - "<|10.98|>", - "<|11.00|>", - "<|11.02|>", - "<|11.04|>", - "<|11.06|>", - "<|11.08|>", - "<|11.10|>", - "<|11.12|>", - "<|11.14|>", - "<|11.16|>", - "<|11.18|>", - "<|11.20|>", - "<|11.22|>", - "<|11.24|>", - "<|11.26|>", - "<|11.28|>", - "<|11.30|>", - "<|11.32|>", - "<|11.34|>", - "<|11.36|>", - "<|11.38|>", - "<|11.40|>", - "<|11.42|>", - "<|11.44|>", - "<|11.46|>", - "<|11.48|>", - "<|11.50|>", - "<|11.52|>", - "<|11.54|>", - "<|11.56|>", - "<|11.58|>", - "<|11.60|>", - "<|11.62|>", - "<|11.64|>", - "<|11.66|>", - "<|11.68|>", - "<|11.70|>", - "<|11.72|>", - "<|11.74|>", - "<|11.76|>", - "<|11.78|>", - "<|11.80|>", - "<|11.82|>", - "<|11.84|>", - "<|11.86|>", - "<|11.88|>", - "<|11.90|>", - "<|11.92|>", - "<|11.94|>", - "<|11.96|>", - "<|11.98|>", - "<|12.00|>", - "<|12.02|>", - "<|12.04|>", - "<|12.06|>", - "<|12.08|>", - "<|12.10|>", - "<|12.12|>", - "<|12.14|>", - "<|12.16|>", - "<|12.18|>", - "<|12.20|>", - "<|12.22|>", - "<|12.24|>", - "<|12.26|>", - "<|12.28|>", - "<|12.30|>", - "<|12.32|>", - "<|12.34|>", - "<|12.36|>", - "<|12.38|>", - "<|12.40|>", - "<|12.42|>", - "<|12.44|>", - "<|12.46|>", - "<|12.48|>", - "<|12.50|>", - "<|12.52|>", - "<|12.54|>", - "<|12.56|>", - "<|12.58|>", - "<|12.60|>", - "<|12.62|>", - "<|12.64|>", - "<|12.66|>", - "<|12.68|>", - "<|12.70|>", - "<|12.72|>", - "<|12.74|>", - "<|12.76|>", - "<|12.78|>", - "<|12.80|>", - "<|12.82|>", - "<|12.84|>", - "<|12.86|>", - "<|12.88|>", - "<|12.90|>", - "<|12.92|>", - "<|12.94|>", - "<|12.96|>", - "<|12.98|>", - "<|13.00|>", - "<|13.02|>", - "<|13.04|>", - "<|13.06|>", - "<|13.08|>", - "<|13.10|>", - "<|13.12|>", - "<|13.14|>", - "<|13.16|>", - "<|13.18|>", - "<|13.20|>", - "<|13.22|>", - "<|13.24|>", - "<|13.26|>", - "<|13.28|>", - "<|13.30|>", - "<|13.32|>", - "<|13.34|>", - "<|13.36|>", - "<|13.38|>", - "<|13.40|>", - "<|13.42|>", - "<|13.44|>", - "<|13.46|>", - "<|13.48|>", - "<|13.50|>", - "<|13.52|>", - "<|13.54|>", - "<|13.56|>", - "<|13.58|>", - "<|13.60|>", - "<|13.62|>", - "<|13.64|>", - "<|13.66|>", - "<|13.68|>", - "<|13.70|>", - "<|13.72|>", - "<|13.74|>", - "<|13.76|>", - "<|13.78|>", - "<|13.80|>", - "<|13.82|>", - "<|13.84|>", - "<|13.86|>", - "<|13.88|>", - "<|13.90|>", - "<|13.92|>", - "<|13.94|>", - "<|13.96|>", - "<|13.98|>", - "<|14.00|>", - "<|14.02|>", - "<|14.04|>", - "<|14.06|>", - "<|14.08|>", - "<|14.10|>", - "<|14.12|>", - "<|14.14|>", - "<|14.16|>", - "<|14.18|>", - "<|14.20|>", - "<|14.22|>", - "<|14.24|>", - "<|14.26|>", - "<|14.28|>", - "<|14.30|>", - "<|14.32|>", - "<|14.34|>", - "<|14.36|>", - "<|14.38|>", - "<|14.40|>", - "<|14.42|>", - "<|14.44|>", - "<|14.46|>", - "<|14.48|>", - "<|14.50|>", - "<|14.52|>", - "<|14.54|>", - "<|14.56|>", - "<|14.58|>", - "<|14.60|>", - "<|14.62|>", - "<|14.64|>", - "<|14.66|>", - "<|14.68|>", - "<|14.70|>", - "<|14.72|>", - "<|14.74|>", - "<|14.76|>", - "<|14.78|>", - "<|14.80|>", - "<|14.82|>", - "<|14.84|>", - "<|14.86|>", - "<|14.88|>", - "<|14.90|>", - "<|14.92|>", - "<|14.94|>", - "<|14.96|>", - "<|14.98|>", - "<|15.00|>", - "<|15.02|>", - "<|15.04|>", - "<|15.06|>", - "<|15.08|>", - "<|15.10|>", - "<|15.12|>", - "<|15.14|>", - "<|15.16|>", - "<|15.18|>", - "<|15.20|>", - "<|15.22|>", - "<|15.24|>", - "<|15.26|>", - "<|15.28|>", - "<|15.30|>", - "<|15.32|>", - "<|15.34|>", - "<|15.36|>", - "<|15.38|>", - "<|15.40|>", - "<|15.42|>", - "<|15.44|>", - "<|15.46|>", - "<|15.48|>", - "<|15.50|>", - "<|15.52|>", - "<|15.54|>", - "<|15.56|>", - "<|15.58|>", - "<|15.60|>", - "<|15.62|>", - 
"<|15.64|>", - "<|15.66|>", - "<|15.68|>", - "<|15.70|>", - "<|15.72|>", - "<|15.74|>", - "<|15.76|>", - "<|15.78|>", - "<|15.80|>", - "<|15.82|>", - "<|15.84|>", - "<|15.86|>", - "<|15.88|>", - "<|15.90|>", - "<|15.92|>", - "<|15.94|>", - "<|15.96|>", - "<|15.98|>", - "<|16.00|>", - "<|16.02|>", - "<|16.04|>", - "<|16.06|>", - "<|16.08|>", - "<|16.10|>", - "<|16.12|>", - "<|16.14|>", - "<|16.16|>", - "<|16.18|>", - "<|16.20|>", - "<|16.22|>", - "<|16.24|>", - "<|16.26|>", - "<|16.28|>", - "<|16.30|>", - "<|16.32|>", - "<|16.34|>", - "<|16.36|>", - "<|16.38|>", - "<|16.40|>", - "<|16.42|>", - "<|16.44|>", - "<|16.46|>", - "<|16.48|>", - "<|16.50|>", - "<|16.52|>", - "<|16.54|>", - "<|16.56|>", - "<|16.58|>", - "<|16.60|>", - "<|16.62|>", - "<|16.64|>", - "<|16.66|>", - "<|16.68|>", - "<|16.70|>", - "<|16.72|>", - "<|16.74|>", - "<|16.76|>", - "<|16.78|>", - "<|16.80|>", - "<|16.82|>", - "<|16.84|>", - "<|16.86|>", - "<|16.88|>", - "<|16.90|>", - "<|16.92|>", - "<|16.94|>", - "<|16.96|>", - "<|16.98|>", - "<|17.00|>", - "<|17.02|>", - "<|17.04|>", - "<|17.06|>", - "<|17.08|>", - "<|17.10|>", - "<|17.12|>", - "<|17.14|>", - "<|17.16|>", - "<|17.18|>", - "<|17.20|>", - "<|17.22|>", - "<|17.24|>", - "<|17.26|>", - "<|17.28|>", - "<|17.30|>", - "<|17.32|>", - "<|17.34|>", - "<|17.36|>", - "<|17.38|>", - "<|17.40|>", - "<|17.42|>", - "<|17.44|>", - "<|17.46|>", - "<|17.48|>", - "<|17.50|>", - "<|17.52|>", - "<|17.54|>", - "<|17.56|>", - "<|17.58|>", - "<|17.60|>", - "<|17.62|>", - "<|17.64|>", - "<|17.66|>", - "<|17.68|>", - "<|17.70|>", - "<|17.72|>", - "<|17.74|>", - "<|17.76|>", - "<|17.78|>", - "<|17.80|>", - "<|17.82|>", - "<|17.84|>", - "<|17.86|>", - "<|17.88|>", - "<|17.90|>", - "<|17.92|>", - "<|17.94|>", - "<|17.96|>", - "<|17.98|>", - "<|18.00|>", - "<|18.02|>", - "<|18.04|>", - "<|18.06|>", - "<|18.08|>", - "<|18.10|>", - "<|18.12|>", - "<|18.14|>", - "<|18.16|>", - "<|18.18|>", - "<|18.20|>", - "<|18.22|>", - "<|18.24|>", - "<|18.26|>", - "<|18.28|>", - "<|18.30|>", - "<|18.32|>", - "<|18.34|>", - "<|18.36|>", - "<|18.38|>", - "<|18.40|>", - "<|18.42|>", - "<|18.44|>", - "<|18.46|>", - "<|18.48|>", - "<|18.50|>", - "<|18.52|>", - "<|18.54|>", - "<|18.56|>", - "<|18.58|>", - "<|18.60|>", - "<|18.62|>", - "<|18.64|>", - "<|18.66|>", - "<|18.68|>", - "<|18.70|>", - "<|18.72|>", - "<|18.74|>", - "<|18.76|>", - "<|18.78|>", - "<|18.80|>", - "<|18.82|>", - "<|18.84|>", - "<|18.86|>", - "<|18.88|>", - "<|18.90|>", - "<|18.92|>", - "<|18.94|>", - "<|18.96|>", - "<|18.98|>", - "<|19.00|>", - "<|19.02|>", - "<|19.04|>", - "<|19.06|>", - "<|19.08|>", - "<|19.10|>", - "<|19.12|>", - "<|19.14|>", - "<|19.16|>", - "<|19.18|>", - "<|19.20|>", - "<|19.22|>", - "<|19.24|>", - "<|19.26|>", - "<|19.28|>", - "<|19.30|>", - "<|19.32|>", - "<|19.34|>", - "<|19.36|>", - "<|19.38|>", - "<|19.40|>", - "<|19.42|>", - "<|19.44|>", - "<|19.46|>", - "<|19.48|>", - "<|19.50|>", - "<|19.52|>", - "<|19.54|>", - "<|19.56|>", - "<|19.58|>", - "<|19.60|>", - "<|19.62|>", - "<|19.64|>", - "<|19.66|>", - "<|19.68|>", - "<|19.70|>", - "<|19.72|>", - "<|19.74|>", - "<|19.76|>", - "<|19.78|>", - "<|19.80|>", - "<|19.82|>", - "<|19.84|>", - "<|19.86|>", - "<|19.88|>", - "<|19.90|>", - "<|19.92|>", - "<|19.94|>", - "<|19.96|>", - "<|19.98|>", - "<|20.00|>", - "<|20.02|>", - "<|20.04|>", - "<|20.06|>", - "<|20.08|>", - "<|20.10|>", - "<|20.12|>", - "<|20.14|>", - "<|20.16|>", - "<|20.18|>", - "<|20.20|>", - "<|20.22|>", - "<|20.24|>", - "<|20.26|>", - "<|20.28|>", - "<|20.30|>", - "<|20.32|>", - "<|20.34|>", - "<|20.36|>", - 
"<|20.38|>", - "<|20.40|>", - "<|20.42|>", - "<|20.44|>", - "<|20.46|>", - "<|20.48|>", - "<|20.50|>", - "<|20.52|>", - "<|20.54|>", - "<|20.56|>", - "<|20.58|>", - "<|20.60|>", - "<|20.62|>", - "<|20.64|>", - "<|20.66|>", - "<|20.68|>", - "<|20.70|>", - "<|20.72|>", - "<|20.74|>", - "<|20.76|>", - "<|20.78|>", - "<|20.80|>", - "<|20.82|>", - "<|20.84|>", - "<|20.86|>", - "<|20.88|>", - "<|20.90|>", - "<|20.92|>", - "<|20.94|>", - "<|20.96|>", - "<|20.98|>", - "<|21.00|>", - "<|21.02|>", - "<|21.04|>", - "<|21.06|>", - "<|21.08|>", - "<|21.10|>", - "<|21.12|>", - "<|21.14|>", - "<|21.16|>", - "<|21.18|>", - "<|21.20|>", - "<|21.22|>", - "<|21.24|>", - "<|21.26|>", - "<|21.28|>", - "<|21.30|>", - "<|21.32|>", - "<|21.34|>", - "<|21.36|>", - "<|21.38|>", - "<|21.40|>", - "<|21.42|>", - "<|21.44|>", - "<|21.46|>", - "<|21.48|>", - "<|21.50|>", - "<|21.52|>", - "<|21.54|>", - "<|21.56|>", - "<|21.58|>", - "<|21.60|>", - "<|21.62|>", - "<|21.64|>", - "<|21.66|>", - "<|21.68|>", - "<|21.70|>", - "<|21.72|>", - "<|21.74|>", - "<|21.76|>", - "<|21.78|>", - "<|21.80|>", - "<|21.82|>", - "<|21.84|>", - "<|21.86|>", - "<|21.88|>", - "<|21.90|>", - "<|21.92|>", - "<|21.94|>", - "<|21.96|>", - "<|21.98|>", - "<|22.00|>", - "<|22.02|>", - "<|22.04|>", - "<|22.06|>", - "<|22.08|>", - "<|22.10|>", - "<|22.12|>", - "<|22.14|>", - "<|22.16|>", - "<|22.18|>", - "<|22.20|>", - "<|22.22|>", - "<|22.24|>", - "<|22.26|>", - "<|22.28|>", - "<|22.30|>", - "<|22.32|>", - "<|22.34|>", - "<|22.36|>", - "<|22.38|>", - "<|22.40|>", - "<|22.42|>", - "<|22.44|>", - "<|22.46|>", - "<|22.48|>", - "<|22.50|>", - "<|22.52|>", - "<|22.54|>", - "<|22.56|>", - "<|22.58|>", - "<|22.60|>", - "<|22.62|>", - "<|22.64|>", - "<|22.66|>", - "<|22.68|>", - "<|22.70|>", - "<|22.72|>", - "<|22.74|>", - "<|22.76|>", - "<|22.78|>", - "<|22.80|>", - "<|22.82|>", - "<|22.84|>", - "<|22.86|>", - "<|22.88|>", - "<|22.90|>", - "<|22.92|>", - "<|22.94|>", - "<|22.96|>", - "<|22.98|>", - "<|23.00|>", - "<|23.02|>", - "<|23.04|>", - "<|23.06|>", - "<|23.08|>", - "<|23.10|>", - "<|23.12|>", - "<|23.14|>", - "<|23.16|>", - "<|23.18|>", - "<|23.20|>", - "<|23.22|>", - "<|23.24|>", - "<|23.26|>", - "<|23.28|>", - "<|23.30|>", - "<|23.32|>", - "<|23.34|>", - "<|23.36|>", - "<|23.38|>", - "<|23.40|>", - "<|23.42|>", - "<|23.44|>", - "<|23.46|>", - "<|23.48|>", - "<|23.50|>", - "<|23.52|>", - "<|23.54|>", - "<|23.56|>", - "<|23.58|>", - "<|23.60|>", - "<|23.62|>", - "<|23.64|>", - "<|23.66|>", - "<|23.68|>", - "<|23.70|>", - "<|23.72|>", - "<|23.74|>", - "<|23.76|>", - "<|23.78|>", - "<|23.80|>", - "<|23.82|>", - "<|23.84|>", - "<|23.86|>", - "<|23.88|>", - "<|23.90|>", - "<|23.92|>", - "<|23.94|>", - "<|23.96|>", - "<|23.98|>", - "<|24.00|>", - "<|24.02|>", - "<|24.04|>", - "<|24.06|>", - "<|24.08|>", - "<|24.10|>", - "<|24.12|>", - "<|24.14|>", - "<|24.16|>", - "<|24.18|>", - "<|24.20|>", - "<|24.22|>", - "<|24.24|>", - "<|24.26|>", - "<|24.28|>", - "<|24.30|>", - "<|24.32|>", - "<|24.34|>", - "<|24.36|>", - "<|24.38|>", - "<|24.40|>", - "<|24.42|>", - "<|24.44|>", - "<|24.46|>", - "<|24.48|>", - "<|24.50|>", - "<|24.52|>", - "<|24.54|>", - "<|24.56|>", - "<|24.58|>", - "<|24.60|>", - "<|24.62|>", - "<|24.64|>", - "<|24.66|>", - "<|24.68|>", - "<|24.70|>", - "<|24.72|>", - "<|24.74|>", - "<|24.76|>", - "<|24.78|>", - "<|24.80|>", - "<|24.82|>", - "<|24.84|>", - "<|24.86|>", - "<|24.88|>", - "<|24.90|>", - "<|24.92|>", - "<|24.94|>", - "<|24.96|>", - "<|24.98|>", - "<|25.00|>", - "<|25.02|>", - "<|25.04|>", - "<|25.06|>", - "<|25.08|>", - "<|25.10|>", - 
"<|25.12|>", - "<|25.14|>", - "<|25.16|>", - "<|25.18|>", - "<|25.20|>", - "<|25.22|>", - "<|25.24|>", - "<|25.26|>", - "<|25.28|>", - "<|25.30|>", - "<|25.32|>", - "<|25.34|>", - "<|25.36|>", - "<|25.38|>", - "<|25.40|>", - "<|25.42|>", - "<|25.44|>", - "<|25.46|>", - "<|25.48|>", - "<|25.50|>", - "<|25.52|>", - "<|25.54|>", - "<|25.56|>", - "<|25.58|>", - "<|25.60|>", - "<|25.62|>", - "<|25.64|>", - "<|25.66|>", - "<|25.68|>", - "<|25.70|>", - "<|25.72|>", - "<|25.74|>", - "<|25.76|>", - "<|25.78|>", - "<|25.80|>", - "<|25.82|>", - "<|25.84|>", - "<|25.86|>", - "<|25.88|>", - "<|25.90|>", - "<|25.92|>", - "<|25.94|>", - "<|25.96|>", - "<|25.98|>", - "<|26.00|>", - "<|26.02|>", - "<|26.04|>", - "<|26.06|>", - "<|26.08|>", - "<|26.10|>", - "<|26.12|>", - "<|26.14|>", - "<|26.16|>", - "<|26.18|>", - "<|26.20|>", - "<|26.22|>", - "<|26.24|>", - "<|26.26|>", - "<|26.28|>", - "<|26.30|>", - "<|26.32|>", - "<|26.34|>", - "<|26.36|>", - "<|26.38|>", - "<|26.40|>", - "<|26.42|>", - "<|26.44|>", - "<|26.46|>", - "<|26.48|>", - "<|26.50|>", - "<|26.52|>", - "<|26.54|>", - "<|26.56|>", - "<|26.58|>", - "<|26.60|>", - "<|26.62|>", - "<|26.64|>", - "<|26.66|>", - "<|26.68|>", - "<|26.70|>", - "<|26.72|>", - "<|26.74|>", - "<|26.76|>", - "<|26.78|>", - "<|26.80|>", - "<|26.82|>", - "<|26.84|>", - "<|26.86|>", - "<|26.88|>", - "<|26.90|>", - "<|26.92|>", - "<|26.94|>", - "<|26.96|>", - "<|26.98|>", - "<|27.00|>", - "<|27.02|>", - "<|27.04|>", - "<|27.06|>", - "<|27.08|>", - "<|27.10|>", - "<|27.12|>", - "<|27.14|>", - "<|27.16|>", - "<|27.18|>", - "<|27.20|>", - "<|27.22|>", - "<|27.24|>", - "<|27.26|>", - "<|27.28|>", - "<|27.30|>", - "<|27.32|>", - "<|27.34|>", - "<|27.36|>", - "<|27.38|>", - "<|27.40|>", - "<|27.42|>", - "<|27.44|>", - "<|27.46|>", - "<|27.48|>", - "<|27.50|>", - "<|27.52|>", - "<|27.54|>", - "<|27.56|>", - "<|27.58|>", - "<|27.60|>", - "<|27.62|>", - "<|27.64|>", - "<|27.66|>", - "<|27.68|>", - "<|27.70|>", - "<|27.72|>", - "<|27.74|>", - "<|27.76|>", - "<|27.78|>", - "<|27.80|>", - "<|27.82|>", - "<|27.84|>", - "<|27.86|>", - "<|27.88|>", - "<|27.90|>", - "<|27.92|>", - "<|27.94|>", - "<|27.96|>", - "<|27.98|>", - "<|28.00|>", - "<|28.02|>", - "<|28.04|>", - "<|28.06|>", - "<|28.08|>", - "<|28.10|>", - "<|28.12|>", - "<|28.14|>", - "<|28.16|>", - "<|28.18|>", - "<|28.20|>", - "<|28.22|>", - "<|28.24|>", - "<|28.26|>", - "<|28.28|>", - "<|28.30|>", - "<|28.32|>", - "<|28.34|>", - "<|28.36|>", - "<|28.38|>", - "<|28.40|>", - "<|28.42|>", - "<|28.44|>", - "<|28.46|>", - "<|28.48|>", - "<|28.50|>", - "<|28.52|>", - "<|28.54|>", - "<|28.56|>", - "<|28.58|>", - "<|28.60|>", - "<|28.62|>", - "<|28.64|>", - "<|28.66|>", - "<|28.68|>", - "<|28.70|>", - "<|28.72|>", - "<|28.74|>", - "<|28.76|>", - "<|28.78|>", - "<|28.80|>", - "<|28.82|>", - "<|28.84|>", - "<|28.86|>", - "<|28.88|>", - "<|28.90|>", - "<|28.92|>", - "<|28.94|>", - "<|28.96|>", - "<|28.98|>", - "<|29.00|>", - "<|29.02|>", - "<|29.04|>", - "<|29.06|>", - "<|29.08|>", - "<|29.10|>", - "<|29.12|>", - "<|29.14|>", - "<|29.16|>", - "<|29.18|>", - "<|29.20|>", - "<|29.22|>", - "<|29.24|>", - "<|29.26|>", - "<|29.28|>", - "<|29.30|>", - "<|29.32|>", - "<|29.34|>", - "<|29.36|>", - "<|29.38|>", - "<|29.40|>", - "<|29.42|>", - "<|29.44|>", - "<|29.46|>", - "<|29.48|>", - "<|29.50|>", - "<|29.52|>", - "<|29.54|>", - "<|29.56|>", - "<|29.58|>", - "<|29.60|>", - "<|29.62|>", - "<|29.64|>", - "<|29.66|>", - "<|29.68|>", - "<|29.70|>", - "<|29.72|>", - "<|29.74|>", - "<|29.76|>", - "<|29.78|>", - "<|29.80|>", - "<|29.82|>", - "<|29.84|>", - 
"<|29.86|>", - "<|29.88|>", - "<|29.90|>", - "<|29.92|>", - "<|29.94|>", - "<|29.96|>", - "<|29.98|>", - "<|30.00|>" -] \ No newline at end of file diff --git a/stf/nasilhong_f.jpg b/stf/nasilhong_f.jpg deleted file mode 100644 index 9f1de6eac353e4918a5c0e521e9c847f1b59ff41..0000000000000000000000000000000000000000 Binary files a/stf/nasilhong_f.jpg and /dev/null differ diff --git a/stf/stf-api-alternative/.gitignore b/stf/stf-api-alternative/.gitignore deleted file mode 100644 index 68bc17f9ff2104a9d7b6777058bb4c343ca72609..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/.gitignore +++ /dev/null @@ -1,160 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/#use-with-ide -.pdm.toml - -# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ diff --git a/stf/stf-api-alternative/.ipynb_checkpoints/README-checkpoint.md b/stf/stf-api-alternative/.ipynb_checkpoints/README-checkpoint.md deleted file mode 100644 index 5a16c5d0a638b6899d6de799e0a02ce0da0351c8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/.ipynb_checkpoints/README-checkpoint.md +++ /dev/null @@ -1 +0,0 @@ -stf_api와 동일한 기능을 수행하는 라이브러리 diff --git a/stf/stf-api-alternative/.ipynb_checkpoints/poetry-checkpoint.lock b/stf/stf-api-alternative/.ipynb_checkpoints/poetry-checkpoint.lock deleted file mode 100644 index 70d3bdeb4280d17a172a656ff6fbc1ce9680669c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/.ipynb_checkpoints/poetry-checkpoint.lock +++ /dev/null @@ -1,3251 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "addict" -version = "2.4.0" -description = "Addict is a dictionary whose items can be set using both attribute and item syntax." 
-optional = false -python-versions = "*" -files = [ - {file = "addict-2.4.0-py3-none-any.whl", hash = "sha256:249bb56bbfd3cdc2a004ea0ff4c2b6ddc84d53bc2194761636eb314d5cfa5dfc"}, - {file = "addict-2.4.0.tar.gz", hash = "sha256:b3b2210e0e067a281f5646c8c5db92e99b7231ea8b0eb5f74dbdf9e259d4e494"}, -] - -[[package]] -name = "aiohttp" -version = "3.9.5" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = 
"aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "asyncstdlib" -version = "3.12.4" -description = "The missing async toolbox" -optional = false -python-versions = "~=3.8" -files = [ - {file = "asyncstdlib-3.12.4-py3-none-any.whl", hash = "sha256:8e269c30906658faca35936d0348c1057aff4df1ee125f6ce564feeb72212d5e"}, - {file = "asyncstdlib-3.12.4.tar.gz", hash = "sha256:c87e2e2ebfea47d24af728e1caab2a4fb705228508679f30e34afdcbd0097a05"}, -] - -[package.extras] -doc = ["sphinx", "sphinxcontrib-trio"] -test = ["black", "coverage", "flake8", "flake8-2020", "flake8-bugbear", "mypy", "pytest", "pytest-cov"] -typetest = ["mypy", "pyright", "typing-extensions"] - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = 
"attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - -[[package]] -name = "audioread" -version = "3.0.1" -description = "Multi-library, cross-platform audio decoding." -optional = false -python-versions = ">=3.6" -files = [ - {file = "audioread-3.0.1-py3-none-any.whl", hash = "sha256:4cdce70b8adc0da0a3c9e0d85fb10b3ace30fbdf8d1670fd443929b61d117c33"}, - {file = "audioread-3.0.1.tar.gz", hash = "sha256:ac5460a5498c48bdf2e8e767402583a4dcd13f4414d286f42ce4379e8b35066d"}, -] - -[package.extras] -test = ["tox"] - -[[package]] -name = "av" -version = "11.0.0" -description = "Pythonic bindings for FFmpeg's libraries." -optional = false -python-versions = ">=3.8" -files = [ - {file = "av-11.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a01f13b37eb6d181e03bbbbda29093fe2d68f10755795188220acdc89560ec27"}, - {file = "av-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b2236faee1b5d71dff3cdef81ef6eec22cc8b71dbfb45eb037e6437fe80f24e7"}, - {file = "av-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40543a08e5c84aecd2bc84da5d43548743201897f0ba21bf5ae3a4dcddefca2b"}, - {file = "av-11.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2907376884d956376aaf3bc1905fa4e0dcb9ba4e0d183e519392a19d89317d1b"}, - {file = "av-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8d5581dcdc81cd601e3ce036809f14da82c46ff187bcefe981ec819390e0ab0"}, - {file = "av-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:150490f2a62cfa470f3cb60f3a0060ff93afd807e2b7b3b0eeeb5a992eb8d67b"}, - {file = "av-11.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d9bac0de62f09e2cb4e2132b5a46a89bc31c898189aa285b484c17351d991afe"}, - {file = "av-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2122ff8bdace4ce50207920f37de472517921e2ca1f0503464f748fdb8e20506"}, - {file = "av-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:527d840697fee6ad4cf47eba987eaf30cd76bd96b2d20eaa907e166b9b8065c8"}, - {file = "av-11.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abeaedddfca9101886eb6fc47318c5f5ece8480d330d73aacf6917d7421981a2"}, - {file = "av-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13790fbb889b955baf885fe3761e923e85537ef414173465ec293177cedb7b99"}, - {file = "av-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fc27e27f52480287f44226ad4ae3eb53346bf027959d0f00a9154530bd98b371"}, - {file = "av-11.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:892583e2c6b8c2500e5d24310f499caefcdaa2e48c8f7169ad41041aaaf4da11"}, - {file = "av-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6943679d70a9f4de974049e7ae2cf0b20afe0d7ddab650526c02a6cf9adcd08f"}, - {file = "av-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e6d73b038ccf1df5c16bc643eee5c694fb7732e09375e2f4903c1f4ce90dfb72"}, - {file = "av-11.0.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c83422db3333e97b9680700df5185139352fc3a568b14179da3bdcbeb2f0e91b"}, - {file = "av-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8413900f6a3639e0088c018a3a516a1656d4d16799e7aa759a16ddf3bd268e2b"}, - {file = "av-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:908e49ee336223801d8f2f7dca5a1deb64e9d8256138b8e7a79013b682a6ebb5"}, - {file = "av-11.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:82411ae4a562da07b76028d2f349fb0e6a86aa78ad2b18d2d7bf5b06b17fba14"}, - {file = "av-11.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:621104bd63e38fa4eca554da3722b1aac329619de39152f27eec8999acc72342"}, - {file = "av-11.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:442878990c094455a16c10127edcc54bc4e78d355e6a13ad2a27608b0ecda38f"}, - {file = "av-11.0.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658199c92987dc72511f5ee8ade62faef6234b7a04c8b5788de99e366be5e073"}, - {file = "av-11.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4b381665c49267b46f87297573898b85e5c41384750fee2e70267fbc4ba318"}, - {file = "av-11.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:60de14f71293e36ca4e297cc8a8460f0cf74f38a201694f3c6fc7f40301582f2"}, - {file = "av-11.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a90f04af96374dab94028a7471597bdfcf03083338b9be2eb8ca4805a8ec7ab5"}, - {file = "av-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8821ab2d23e4cb5c8abea6b08d2b1bfceca6af2d88fab1d1dc1b3ec7b34933c7"}, - {file = "av-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a92342ed307eeaf9509a6b0f3bafd4337c4880c851b50acc18df48c625b63b6"}, - {file = "av-11.0.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe3502975bc844f5d432c1f24d331bf6ef3e05532ebf06f7ed08b60719b8ea5"}, - {file = "av-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c278b3a4fd111b4c9190abe6b1a5ca358d5f91e851d470b62577b957e0187b09"}, - {file = "av-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:478aa1d54fbc3058ea65ff41086b6adbe1326b456a027d2f3b59dbe60b4ac2ca"}, - {file = "av-11.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e8df10bb2d56a981d02a8a0b41491912b76dad06305d174a2575ef55ad451100"}, - {file = "av-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30c51e597785a89241bd61865faff2dbd3327856a8285a1e120dbf60e18348b"}, - {file = "av-11.0.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8b8bd92edb096699b306e7b090ad096925ca3bdae6f89656f023fa2a2da627d"}, - {file = "av-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9383af733abfc44f6fc29307a6c922fbf671ee343dc97b78b74eac6a2346a46d"}, - {file = "av-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a9df4a60579198b560f641cdfe4c2139948a70193ddc096b275f2cf6d94e3e04"}, - {file = "av-11.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8ae5f7ae0a7093fb813686d4aa4c554531f80a28480427f5c155da51b747eff0"}, - {file = "av-11.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50fb7d606f8236891d773c701d5650b93af8dbf78eeaac36fc7e1f7f64a9d664"}, - {file = "av-11.0.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:543e0f9bf6ff02dedbe66d906fbc89c8907c80a8ea7413fc3fed68ce4a6e9b44"}, - {file = "av-11.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daa279c884457ab194ce78bdd89c0aa391af733da95fb3258d4c6eb8c258299a"}, - {file = "av-11.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1aacc21f4cf96447117a61edfb776afb73186750a5e08a21484ddfc3599aefb5"}, - {file = "av-11.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2568b38eef777b916a5d02e42b8f67f92e12023531239ddd32e1ca4f3cdf8c5b"}, - {file = "av-11.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:747c6d347e27c59cc2e78c9c505d23cd88eceff0cc9386be73693ae9009a577c"}, - {file = "av-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd8f4941b9d3450eff40003b9b9d904667aec7ab085fa31f0f9bca32d755e0"}, - {file = "av-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f39c1244ba0cf185b2722aeec116b8a98a2ee5728ce687cec0bda60ee0360dfc"}, - {file = "av-11.0.0.tar.gz", hash = "sha256:48223f000a252070f8e700ff634bb7fb3aa1b7bc7e450373029fbdd6f369ac31"}, -] - -[[package]] -name = "brotli" -version = "1.1.0" -description = "Python bindings for the Brotli compression library" -optional = false -python-versions = "*" -files = [ - {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, - {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, - {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"}, - {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"}, - {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"}, - {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, - {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, - {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, - {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, - {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"}, - {file = 
"Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"}, - {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"}, - {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"}, - {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, - {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, - {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, - {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, - {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"}, - {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"}, - {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, - {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, - {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, - {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, - {file = 
"Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, - {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, - {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"}, - {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, - {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, - {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, - {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, - {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"}, - {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"}, - {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"}, - {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, - {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, - {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, - {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, - {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"}, - {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"}, - {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"}, - {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"}, - {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, - {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, - {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, - {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, - {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"}, - {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"}, - {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"}, - {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"}, - {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, - {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, - {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, - {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, -] - -[[package]] -name = "certifi" -version = "2024.7.4" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = 
"cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - 
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "cuda-python" -version = "12.5.0" -description = "Python bindings for CUDA" -optional = false -python-versions = "*" -files = [ - {file = "cuda_python-12.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bbfacbbcfcae149dbc937aa614d362608f20bba973882002fee9125ced32604"}, - {file = "cuda_python-12.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087acc19ac4b467d71cfb7a39306038993176a7a1459426da50afa0fe68c697"}, - {file = "cuda_python-12.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:6a462a2d5d2658041a317c113023ce97a609f98c25272b5b1d13e274eecbb941"}, - {file = "cuda_python-12.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:560453304aee0a3bc28a2626e8e3fd1f94fcea58912b4a9156e6ef2a0121f2ba"}, - {file = "cuda_python-12.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c2f3fc9b8149ffcb4016d8e5ed4ae1d728a55fa02de9b3d2c2c14cda7e4ee8e"}, - {file = "cuda_python-12.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:62d692bcf4fb71224290610df52ce33760e0b08a8a1bf0652003712d70caf063"}, - {file = "cuda_python-12.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f76b2c8b7c97174d62f965ad8a518d10b27cd724ad06bb9f0f59ba6f80b492f"}, - {file = "cuda_python-12.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e7c1d70bf1d6696dfd4a72dfd129cd963b377cfb7ffa9cc6e888101c2b04df4"}, - {file = "cuda_python-12.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:c58a4a86fd4b71cc072a9dfb4789994bebb9a333085ffa75530672fb6380df6c"}, - {file = "cuda_python-12.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cece998c45a35d222edca2db623948c9a3bc4fce6e3a4d6d98e6aa245d28a7ec"}, - {file = "cuda_python-12.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e9545b69cd3defba350ed9fb7d599c2c558646877a58f8a51ee2cc7a5ad43d"}, - {file = "cuda_python-12.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:22a09b8f0becad338e6e2c2cc97b72d25823517dd8a355009faf43f345ed7d04"}, -] - -[[package]] -name = "decorator" -version = "4.4.2" -description = "Decorators for Humans" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*" -files = [ - {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, - {file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"}, -] - -[[package]] -name = "face-alignment" -version = "1.3.5" -description = "Detector 2D or 3D face landmarks from Python" -optional = false -python-versions = ">=3" -files = [ - {file = "face_alignment-1.3.5-py2.py3-none-any.whl", hash = "sha256:f0d9d0f8276ff9cccb73f37665acf3e8721a09b758148d767116befa43a73945"}, - {file = "face_alignment-1.3.5.tar.gz", hash = "sha256:20e940a7c769c7c4cdfd1d4ef1212ec68468721b8df0c17bf93b29b37c61a071"}, -] - -[package.dependencies] -numba = "*" -numpy = "*" -opencv-python = "*" -scikit-image = "*" -scipy = ">=0.17" -torch = "*" -tqdm = "*" - -[[package]] -name = "facenet-pytorch" -version = "2.5.2" -description = 
"Pretrained Pytorch face detection and recognition models" -optional = false -python-versions = "*" -files = [ - {file = "facenet-pytorch-2.5.2.tar.gz", hash = "sha256:6188402b90f29b23f4de31834f275f8d8eea83d085c2845bad4bad80547b7b53"}, - {file = "facenet_pytorch-2.5.2-py3-none-any.whl", hash = "sha256:5d4be649f86f5a09837a5972b143ddc65e19016f52eef6e4ee6afb0044253b98"}, -] - -[package.dependencies] -numpy = "*" -pillow = "*" -requests = "*" -torchvision = "*" - -[[package]] -name = "ffmpeg-python" -version = "0.2.0" -description = "Python bindings for FFmpeg - with complex filtering support" -optional = false -python-versions = "*" -files = [ - {file = "ffmpeg-python-0.2.0.tar.gz", hash = "sha256:65225db34627c578ef0e11c8b1eb528bb35e024752f6f10b78c011f6f64c4127"}, - {file = "ffmpeg_python-0.2.0-py3-none-any.whl", hash = "sha256:ac441a0404e053f8b6a1113a77c0f452f1cfc62f6344a769475ffdc0f56c23c5"}, -] - -[package.dependencies] -future = "*" - -[package.extras] -dev = ["Sphinx (==2.1.0)", "future (==0.17.1)", "numpy (==1.16.4)", "pytest (==4.6.1)", "pytest-mock (==1.10.4)", "tox (==3.12.1)"] - -[[package]] -name = "filelock" -version = "3.15.4" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = 
"frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = 
"sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "fsspec" -version = "2024.6.1" -description = "File-system specification" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, -] - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -dev = ["pre-commit", "ruff"] -doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] -test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] -test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] -tqdm = ["tqdm"] - -[[package]] -name = "future" -version = "1.0.0" -description = "Clean single-source support for Python 3 and 2" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, - {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, -] - -[[package]] -name = "gevent" -version = "24.2.1" -description = "Coroutine-based network library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "gevent-24.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6f947a9abc1a129858391b3d9334c45041c08a0f23d14333d5b844b6e5c17a07"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde283313daf0b34a8d1bab30325f5cb0f4e11b5869dbe5bc61f8fe09a8f66f3"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1df555431f5cd5cc189a6ee3544d24f8c52f2529134685f1e878c4972ab026"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:14532a67f7cb29fb055a0e9b39f16b88ed22c66b96641df8c04bdc38c26b9ea5"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd23df885318391856415e20acfd51a985cba6919f0be78ed89f5db9ff3a31cb"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ca80b121bbec76d7794fcb45e65a7eca660a76cc1a104ed439cdbd7df5f0b060"}, - {file = "gevent-24.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9913c45d1be52d7a5db0c63977eebb51f68a2d5e6fd922d1d9b5e5fd758cc98"}, - {file = "gevent-24.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:918cdf8751b24986f915d743225ad6b702f83e1106e08a63b736e3a4c6ead789"}, - {file = "gevent-24.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d5325ccfadfd3dcf72ff88a92fb8fc0b56cacc7225f0f4b6dcf186c1a6eeabc"}, - {file = "gevent-24.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:03aa5879acd6b7076f6a2a307410fb1e0d288b84b03cdfd8c74db8b4bc882fc5"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8bb35ce57a63c9a6896c71a285818a3922d8ca05d150fd1fe49a7f57287b836"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7f87c2c02e03d99b95cfa6f7a776409083a9e4d468912e18c7680437b29222c"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968581d1717bbcf170758580f5f97a2925854943c45a19be4d47299507db2eb7"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7899a38d0ae7e817e99adb217f586d0a4620e315e4de577444ebeeed2c5729be"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f5e8e8d60e18d5f7fd49983f0c4696deeddaf6e608fbab33397671e2fcc6cc91"}, - {file = "gevent-24.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fbfdce91239fe306772faab57597186710d5699213f4df099d1612da7320d682"}, - {file = "gevent-24.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cdf66977a976d6a3cfb006afdf825d1482f84f7b81179db33941f2fc9673bb1d"}, - {file = "gevent-24.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1dffb395e500613e0452b9503153f8f7ba587c67dd4a85fc7cd7aa7430cb02cc"}, - {file = "gevent-24.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:6c47ae7d1174617b3509f5d884935e788f325eb8f1a7efc95d295c68d83cce40"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7cac622e11b4253ac4536a654fe221249065d9a69feb6cdcd4d9af3503602e0"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf5b9c72b884c6f0c4ed26ef204ee1f768b9437330422492c319470954bc4cc7"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5de3c676e57177b38857f6e3cdfbe8f38d1cd754b63200c0615eaa31f514b4f"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4faf846ed132fd7ebfbbf4fde588a62d21faa0faa06e6f468b7faa6f436b661"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:368a277bd9278ddb0fde308e6a43f544222d76ed0c4166e0d9f6b036586819d9"}, - {file = "gevent-24.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f8a04cf0c5b7139bc6368b461257d4a757ea2fe89b3773e494d235b7dd51119f"}, - {file = "gevent-24.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d8d0642c63d453179058abc4143e30718b19a85cbf58c2744c9a63f06a1d388"}, - {file = "gevent-24.2.1-cp312-cp312-win_amd64.whl", 
hash = "sha256:94138682e68ec197db42ad7442d3cf9b328069c3ad8e4e5022e6b5cd3e7ffae5"}, - {file = "gevent-24.2.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:8f4b8e777d39013595a7740b4463e61b1cfe5f462f1b609b28fbc1e4c4ff01e5"}, - {file = "gevent-24.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141a2b24ad14f7b9576965c0c84927fc85f824a9bb19f6ec1e61e845d87c9cd8"}, - {file = "gevent-24.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9202f22ef811053077d01f43cc02b4aaf4472792f9fd0f5081b0b05c926cca19"}, - {file = "gevent-24.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2955eea9c44c842c626feebf4459c42ce168685aa99594e049d03bedf53c2800"}, - {file = "gevent-24.2.1-cp38-cp38-win32.whl", hash = "sha256:44098038d5e2749b0784aabb27f1fcbb3f43edebedf64d0af0d26955611be8d6"}, - {file = "gevent-24.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:117e5837bc74a1673605fb53f8bfe22feb6e5afa411f524c835b2ddf768db0de"}, - {file = "gevent-24.2.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:2ae3a25ecce0a5b0cd0808ab716bfca180230112bb4bc89b46ae0061d62d4afe"}, - {file = "gevent-24.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ceb59986456ce851160867ce4929edaffbd2f069ae25717150199f8e1548b8"}, - {file = "gevent-24.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2e9ac06f225b696cdedbb22f9e805e2dd87bf82e8fa5e17756f94e88a9d37cf7"}, - {file = "gevent-24.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:90cbac1ec05b305a1b90ede61ef73126afdeb5a804ae04480d6da12c56378df1"}, - {file = "gevent-24.2.1-cp39-cp39-win32.whl", hash = "sha256:782a771424fe74bc7e75c228a1da671578c2ba4ddb2ca09b8f959abdf787331e"}, - {file = "gevent-24.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:3adfb96637f44010be8abd1b5e73b5070f851b817a0b182e601202f20fa06533"}, - {file = "gevent-24.2.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:7b00f8c9065de3ad226f7979154a7b27f3b9151c8055c162332369262fc025d8"}, - {file = "gevent-24.2.1.tar.gz", hash = "sha256:432fc76f680acf7cf188c2ee0f5d3ab73b63c1f03114c7cd8a34cebbe5aa2056"}, -] - -[package.dependencies] -cffi = {version = ">=1.12.2", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} -greenlet = [ - {version = ">=3.0rc3", markers = "platform_python_implementation == \"CPython\" and python_version >= \"3.11\""}, - {version = ">=2.0.0", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""}, -] -"zope.event" = "*" -"zope.interface" = "*" - -[package.extras] -dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"] -docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"] -monitor = ["psutil (>=5.7.0)"] -recommended = ["cffi (>=1.12.2)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"] -test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests"] - -[[package]] -name = "geventhttpclient" -version = "2.0.2" -description = "http client library for gevent" -optional = false -python-versions = "*" -files = [ - {file = "geventhttpclient-2.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd76acdc7e7ee5c54c7b279f806b28957a6b092f79c40db34adcfd972749343c"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:320a2c756d8a4f296de370476a1515485c186d9e22c3fc29e04f8f743a7d47bb"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:36d3345c6585b09738195a7c45d279a87ccbab0350f1cce3679d3f0dce8577a1"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:407d54499556c2741b93691b86da93232590b013f4a0b773327d766fe3e5c0a9"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcf325131b0e4600b793643108cd85dddd66bbf532fd2eb498be5727ef532a1e"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5841dd02e6f792a4ef15dbd04fefe620c831ba0b78105808160bb779a31af4"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2ba69422d4e8670dd99803b1313ba574a4d41f52e92b512af51068c9c577bdc1"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e3af579c6b46b9caa515a8baf6a2cadeafcd1d41ad22ca5712851f074a40b47"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6ff7fc19f9a4fdd54a2b1c106a705ea2c679fa049685ed763051d417725bdab1"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-win32.whl", hash = "sha256:eec7c52e8eb817674a193e0124486b507215d9e86d34f2638bf9a9292d16f815"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:0e9f7283c01d970e643d89da81127869a8d94bb7a0081020dcad5b590bc007c4"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5ceb492d43a659b895794999dc40d0e7c23b1d41dd34040bbacd0dc264b57d5b"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95959c201d3151fa8f57e0f1ce184476d1173996bdde41dc7d600006023dc5be"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:31c7febba298ecf44838561074a3fb7a01523adca286469b5a82dcc90e8d6a07"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:996c5f453d810b3c592160193d6832a065cca0112e92adc74e62df0e4c564df6"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f817e226c02b5a71d86de3772d6accdf250288d1e6825e426c713759830162d"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c55b7ac0ba0e1e1afbf297b7608f0b3a0bbc34fb4b0c19b7869f32a77ddc6209"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6775bc81e25c48fa58b034444aecfa508b0c3d1bc1e4ae546cc17661be1f51aa"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a0156882c73537bbbbc7c693ae44c9808119963174078692613ffa4feea21fcf"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ebb582a291c4c5daaac2ea115b413f4be86874baa60def44d333301cee17bd7"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-win32.whl", hash = "sha256:716f1f72f50b841daf9c9511a01fc31a030866510a11863f27741e26e4f556a7"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:777fcdb72077dfbf70516ecb9e9022246dd337b83a4c1e96f17f3ab9e15f4547"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:379d90d8b1fcdda94e74d693806e0b0116c0610504e7f62d5576bac738dc66a5"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:00b7b2b836294c091c53789a469c5671202d79420b5191931df4e3a767d607fa"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d075355862d7726eb3436f0136fce7650c884f2d04eaae7a39fed3aad9798bc"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa7b1a27f950d209fe223a97906fe41312dc12c92372424639b8a9b96f1adf91"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fe4e06313aad353b103950780b050d3958000464cc732d621ff8ea3cacbd2bc4"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:84d7be660b6bc53dd53e3f46b3bc5d275972a8116bd183a77139bb4d9d6d9fb1"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:81f839d6becd664d0972b488422f5dc821f8ad2f2196d53aa5e4d799a3a35a66"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:e707f62271a093e6e3af6f1bbd8cc398b414b8c508fe6b15505dd8e76c4409ac"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:28d7655d1d50bc75ece683a0ae8faf978821d4aeae358d77b59371548db07f1e"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58877b4440a580063571a23fbc616aed7c735c6bf9ef525c5129783df8b6966"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57c993c4b2bea551c4a71b75ae1e172e9f3e4352f704ff1b619a0f16aa762f76"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f67e789e31c7b1ce440cd1465dcdefeca29ba6108735eac0b1a593d3a55b7f"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3326e115ec7e7ce95a5d0d47698e8f3584944c4c434a7404937d56b17136b8"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ef328ee3e7dca5055b833fdf3c181647a335abf0249947b27f5df2d95390198c"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:27049ea40e3b559eee380310272aaa9b7c19e73c1d9e51e2ec137362be2caa70"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b88a10538341e33fed1682c0dd4579c655d49db5863e7456583085a1cd6bd9d4"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:d52aba2c38420b3fc518188449f1c2a46b1a99adf1c0266c68e72ee0422cd0fa"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3648626ca58ea4b340e695d78e5d533e6b8be78d375edbd42ff188bc3447e095"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fcf96e212b55b93490f3a5fcdfe7a2ef4995a0d13b7d9df398b11e319b7a86b1"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e9f2ff09706e3a64a99886d5f2595f3bf364821bc609f2865dbc3e499e21a36"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:721c3075897bfc81e918066f16ae3d1a88c7bb14eeeb831a4f89ea636474643e"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91615fed7931acd49cfe5fc30984acd5411dc1f2643b1544c879d1a537233c6d"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7adaa29e5699dea54e0224d1d2d9d8869668d8ad79f5b89433ff9c46f9424a6c"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9be5000ba57336a90b438782117c1e43205f51f49aa9b1499a82e210e8431b11"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:12d271cc53486efb3716e99855dc5cb84f2cd3fc9f3243721747bb39ec0fff8a"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b9c0c6b75b3905000d2490dc64b4c98a8bac155efbc0ff8917ac082ae0bad261"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e956a457d8831dc81d6f046ab09ebeec680f9a1e9c07e25a1906e77b287918ee"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-win32.whl", hash = "sha256:bc46d5479673dfb293ea428c057d2e23e48ebef5c5d44587cdbaada7f87553e4"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:f44153e4b3ef9b901edcd14be54145a0058bf5fa371b3e583153865fac866245"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ebf98db9435824cf0b80b5247be6c88b20bfafd6249f7ebaabb85297da37e380"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c8b7298eb1ebd015257bf4503e34f5fbbe64bd83324140f76b511046aba5a0d5"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:60b81a6d4e65db7c1a5350c9fb72ebf800b478849a7e8020d1ab93af237a3747"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad6c2fcbc3733785bd3b8c2bb43d1f605f9085b0a8b70ce354d198f37143f884"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94edb022fa50d576cf63f6dd0c437c1acd24a719872a5935991aaf08f8e88cb2"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ca459cedb3827d960362e05ea3a4ae600a6d0d93de77eac2ac0f79828e5e18c"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7551b6db860b56411de1f96618e91b54f65e1a7be8d10255bd1adfb738bb6ee5"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bcb7e061c243308d9a44b02de5298001e917f1636a9f270c10da86601fcc8dfa"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:96922d170ef8933f4c20036e8d70d4fbe861f54c543e32e7459ebdbaafa65a2e"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ebb3c993903d40fd4bb1f3e55b84c62c8fc1d14433ae6d4d477dd9a325354c94"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:dbccf1ba155dea3ea99ba0e67a835c05b4303f05298e85f5bb2a46700ccdf092"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8770b8ab9e8c31d2aaf8a6fbc63fbb7239c58db10bb49cee191ca5c141c61542"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daff1e977fccf98f27266d3891afdc101f1d705a48331754909e960bcae83f8a"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2435e0f2a60e00d977822ec4c12e7851deb7aa49a23d32d648e72c641aae3b05"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:09acd03d0a8c1bb7d5a1cb6fcb77aaa19a907c1b4915ab58da5d283675edb0a5"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5d0813d97050446dab2fb243312e6c446e4ef5e9591befd597ef8f2887f8e2a8"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:852da9bb0fc792cdca5ffc9327490094783e42415494b3569e5d532615027439"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79304a63a9d0512f2757c5862487b332b18a9c85feebecf6ebc3526c6dd1ba2"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c1c783fce45f16db448d7e34864f1e9c22fe60a7780d2c1c14edbb1fb7262e"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77c407c2b4bea817c6f752502db4ab0e9f9465b4fb85b459d1332b5f93a3096c"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4f0d70a83ef4ab93102c6601477c13e9cdbc87205e5237fbf5797e30dc9d3ee8"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b03f298ec19b8a4717cce8112fe30322c9e5bfada84dde61a1a44d1eeffc1d3c"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2dc94b9a23eb6744a8c729aec2b1cdc4e39acf1d8f16ea85a62810aa6b2cae5"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:805554594bb29231fd990cc2cbbe493d223d76a6085fec891dd76bb4e0928933"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb23527d98f626ca7a4e8961ed9bdc6aed3388de306614c69a133b34262460f4"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a594ab319872a38fb7f16be4cfb107d3c63c43a081f2abe241834e9877f27401"}, - {file = "geventhttpclient-2.0.2.tar.gz", hash = "sha256:8135a85200b170def7293d01dd1557931fcd1bec1ac78c52ad7cedd22368b9ba"}, -] - -[package.dependencies] -brotli = "*" -certifi = "*" -gevent = ">=0.13" -six = "*" - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, 
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "grpcio" -version = "1.65.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.65.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:66ea0ca6108fcb391444bb7b37d04eac85bfaea1cfaf16db675d3734fc74ca1b"}, - {file = "grpcio-1.65.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:45d371dc4436fdcc31677f75b3ebe6175fbf0712ced49e0e4dfc18bbaf50f5a7"}, - {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:02dbbe113ec48581da07b7ddf52bfd49f5772374c4b5e36ea25131ce00b4f4f3"}, - {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c9ee7b8f1ac82cc24f223cd7ec803c17079f90e63022d3e66c5e53fff0afb99"}, - {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da927f8a44e42837ae0027a3a063c85e2b26491d2babd4554e116f66fd46045d"}, - {file = "grpcio-1.65.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9916ea670a589f95f2453a4a5040294ace096271c126e684a1e45e61af76c988"}, - {file = "grpcio-1.65.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c46114787c5f530e845d2781f914600aade04b4f132dd012efb31bc4f76a72bb"}, - {file = "grpcio-1.65.0-cp310-cp310-win32.whl", hash = "sha256:1362d94ac9c05b202736180d23296840e00f495859b206261e6ed03a6d41978b"}, - {file = "grpcio-1.65.0-cp310-cp310-win_amd64.whl", hash = "sha256:00ed0828980009ce852d98230cdd2d5a22a4bcb946b5a0f6334dfd8258374cd7"}, - {file = "grpcio-1.65.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:25303f3747522252dd9cfcbacb88d828a36040f513e28fba17ee6184ebc3d330"}, - {file = "grpcio-1.65.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a2b368717dd8e0f6cb7e412d3b3bfb0012f61c04b2f76dbed669b0f5cf3fb0c"}, - {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:93c41fb74c576dc0130b190a5775197282115c6abbe1d913d42d9a2f9d98fdae"}, - {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34eb4fb9ef4d11ea741d264916d1b31a9e169d539a6f1c8300e04c493eec747e"}, - {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55c41272f9d7d3503e3e3e93f3f98589f07075eebd24e1c291a1df2e8ef40a49"}, - {file = "grpcio-1.65.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c275bac926754022c89ef03f16470f65b811e2cc25f2167d365564ad43e31001"}, - {file = "grpcio-1.65.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b02db2a59071f4d05cfc4d0c972759778d27e1d3347f22ca178b91117ad10541"}, - 
{file = "grpcio-1.65.0-cp311-cp311-win32.whl", hash = "sha256:ec9f41b9b0eb6407a6edb21bc22cb32e03cae76cde9c1d8bb151ed77c2c5af94"}, - {file = "grpcio-1.65.0-cp311-cp311-win_amd64.whl", hash = "sha256:3efc8b0600870f5e518dd2738188b3ba7b1bb2668244c9a2a8c4debda4ffe62b"}, - {file = "grpcio-1.65.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:d787abafafa9ed71e17220d4178c883abdb380e0484bd8965cb2e06375c7495b"}, - {file = "grpcio-1.65.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:52347f21d6ec77d7e7e4d5037f5e8ac0a0c851856d9459f9f95b009c2c740b4a"}, - {file = "grpcio-1.65.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b16e1cd9b9cb9ac942cb20b7a2b1c5d35b9e61017e2998bf242a6f7748071795"}, - {file = "grpcio-1.65.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89bc9c8c6743a48f115fea8f3fada76be269d1914bf636e5fdb7cec9cdf192bc"}, - {file = "grpcio-1.65.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5a2ae900e6423438c4a9a5be38e9228621340a18333371215c0419d24a254ef"}, - {file = "grpcio-1.65.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4f451091ddd28f00c655f0b1e208cca705d40e4fde56a3cf849fead61a700d10"}, - {file = "grpcio-1.65.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4e30cd885e02abb98d6b0d5beb6259a567b0ce1416c498ec815fe383adb77864"}, - {file = "grpcio-1.65.0-cp312-cp312-win32.whl", hash = "sha256:9a9a0ce10a07923ebd48c056060052ebddfbec3193cdd32207af358ef317b00a"}, - {file = "grpcio-1.65.0-cp312-cp312-win_amd64.whl", hash = "sha256:87d9350ffe1a84b7441db7c70fdb4e51269a379f7a95d696d0d133831c4f9a19"}, - {file = "grpcio-1.65.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:0c504b30fc2fba143d9254e0240243b5866df9b7523162448797f4b21b5f30d5"}, - {file = "grpcio-1.65.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:480be4d41ceb5a7f22ecfc8db1ab68aeb58cc1a2da0865a91917d3cd0438dac7"}, - {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:984a1627b50d5df4a24120302ca95adb5139ba1c40354ba258fc2913666d8ee7"}, - {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f242956c0f4985dfcc920cd251cd7a899ca168e157e98c9b74a688657e813ad6"}, - {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ea93f570b2341c69635b8a333afb99fb4d5584f26a9cc94f06e56c943648aab"}, - {file = "grpcio-1.65.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1bebefd76517a43d0e77a5dcd61a8b69e9775340d856a0b35c6368ae628f7714"}, - {file = "grpcio-1.65.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:356d10a491a92a08c21aef806379f7b020f591c23580e3d29aeeb59d45908c86"}, - {file = "grpcio-1.65.0-cp38-cp38-win32.whl", hash = "sha256:c3294fd3ef9faa1fe14ad15d72dd7d2ee9fee6d3bd29a08c53e59a3c94de9cc9"}, - {file = "grpcio-1.65.0-cp38-cp38-win_amd64.whl", hash = "sha256:a2defc49c984550f25034e88d17a7e69dba6deb2b981d8f56f19b3aaa788ff30"}, - {file = "grpcio-1.65.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:b73022222ed4bf718d3d8527a9b88b162074a62c7530d30f4e951b56304b0f19"}, - {file = "grpcio-1.65.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16e0f789158ecc8309e0a2f16cb8c5e4753f351a7673aab75f42783c83f1e38b"}, - {file = "grpcio-1.65.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:cb0bd8bfba21fe0318317bf11687c67a3f8ce726369c0b3ccf4e6607fc5bc5f2"}, - {file = "grpcio-1.65.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1096f0fa79ec601aefd71685d3a610cdde96274c38cd8adcef972660297669a"}, - {file = 
"grpcio-1.65.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e576a88ce82fea70e68c548aceb5cd560c27da50091581996858bbbe01230c83"}, - {file = "grpcio-1.65.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab70bd1ccb05ef373b691a9b9985289d8b2cf63c704471f5ee132e228d351af5"}, - {file = "grpcio-1.65.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03eab632a8ce8dba00d97482d2821bf752a7c3cb4dc051be6c587ad3ca1c3e6d"}, - {file = "grpcio-1.65.0-cp39-cp39-win32.whl", hash = "sha256:f19bb85795ca82e007be427e7b6ac5e730023ffbab69d39ddeb1b84c6339df16"}, - {file = "grpcio-1.65.0-cp39-cp39-win_amd64.whl", hash = "sha256:dbd7eeafa67d8e403ac61caa31ebda2861435dcfd7bb7953c4ef05ad2ecf74bf"}, - {file = "grpcio-1.65.0.tar.gz", hash = "sha256:2c7891f66daefc80cce1bed6bc0c2802d26dac46544ba1be79c4e7d85661dd73"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.65.0)"] - -[[package]] -name = "huggingface-hub" -version = "0.23.4" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "huggingface_hub-0.23.4-py3-none-any.whl", hash = "sha256:3a0b957aa87150addf0cc7bd71b4d954b78e749850e1e7fb29ebbd2db64ca037"}, - {file = "huggingface_hub-0.23.4.tar.gz", hash = "sha256:35d99016433900e44ae7efe1c209164a5a81dbbcd53a52f99c281dcd7ce22431"}, -] - -[package.dependencies] -filelock = "*" -fsspec = ">=2023.5.0" -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = ">=4.42.1" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors", "torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = 
"sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "imageio" -version = "2.13.5" -description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." -optional = false -python-versions = ">=3.5" -files = [ - {file = "imageio-2.13.5-py3-none-any.whl", hash = "sha256:a3a18d5d01732557247fba5658d7f75425e97ce49c8fe2cd81bd348f5c71ffb2"}, - {file = "imageio-2.13.5.tar.gz", hash = "sha256:c7ec2be58e401b6eaa838f8eaf8368ed54b2de4a1b001fe6551644f1a30a843d"}, -] - -[package.dependencies] -numpy = "*" -pillow = ">=8.3.2" - -[package.extras] -build = ["wheel"] -dev = ["black", "flake8", "invoke", "pytest", "pytest-cov"] -docs = ["numpydoc", "pydata-sphinx-theme", "sphinx"] -ffmpeg = ["imageio-ffmpeg", "psutil"] -fits = ["astropy"] -full = ["astropy", "black", "flake8", "gdal", "imageio-ffmpeg", "invoke", "itk", "numpydoc", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "sphinx", "tifffile", "wheel"] -gdal = ["gdal"] -itk = ["itk"] -linting = ["black", "flake8"] -test = ["invoke", "pytest", "pytest-cov"] -tifffile = ["tifffile"] - -[[package]] -name = "imageio-ffmpeg" -version = "0.4.5" -description = "FFMPEG wrapper for Python" -optional = false -python-versions = ">=3.4" -files = [ - {file = "imageio-ffmpeg-0.4.5.tar.gz", hash = "sha256:f2ea4245a2adad25dedf98d343159579167e549ac8c4691cef5eff980e20c139"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:266601aab7619acf6ff78cd5ba78b5a593a1119a96d266d33b88bfcd01bbd3ca"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-manylinux2010_x86_64.whl", hash = "sha256:f127b8cdd842e8398de5f2aef23c687ae75d4d964e1df2ea3a9ff03e92a370e7"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:db4d318f640419037a0df29bb11b1022f2f8094c90b4aac8affc7177b8ce4641"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-win32.whl", hash = "sha256:39a9ab4326bdf5eae3457961dfdfb4317078659ebe4e6980914ac897a462aeb2"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-win_amd64.whl", hash = "sha256:d2ba8339eecc02fa73a6b85c34654c49a7c78d732a1ac76478d11224e6cfa902"}, -] - -[[package]] -name = "intel-openmp" -version = "2021.4.0" -description = "Intel OpenMP* Runtime Library" -optional = false -python-versions = "*" -files = [ - {file = "intel_openmp-2021.4.0-py2.py3-none-macosx_10_15_x86_64.macosx_11_0_x86_64.whl", hash = "sha256:41c01e266a7fdb631a7609191709322da2bbf24b252ba763f125dd651bcc7675"}, - {file = "intel_openmp-2021.4.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:3b921236a38384e2016f0f3d65af6732cf2c12918087128a9163225451e776f2"}, - {file = "intel_openmp-2021.4.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:e2240ab8d01472fed04f3544a878cda5da16c26232b7ea1b59132dbfb48b186e"}, - {file = "intel_openmp-2021.4.0-py2.py3-none-win32.whl", hash = "sha256:6e863d8fd3d7e8ef389d52cf97a50fe2afe1a19247e8c0d168ce021546f96fc9"}, - {file = "intel_openmp-2021.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:eef4c8bcc8acefd7f5cd3b9384dbf73d59e2c99fc56545712ded913f43c4a94f"}, -] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "lazy-loader" -version = "0.4" -description = "Makes it easy to load subpackages and functions on demand." -optional = false -python-versions = ">=3.7" -files = [ - {file = "lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc"}, - {file = "lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -dev = ["changelist (==0.5)"] -lint = ["pre-commit (==3.7.0)"] -test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] - -[[package]] -name = "librosa" -version = "0.8.1" -description = "Python module for audio and music processing" -optional = false -python-versions = ">=3.6" -files = [ - {file = "librosa-0.8.1-py3-none-any.whl", hash = "sha256:fd381e2d7067d4d4cf7691f2ef3620ef62a8aa6445dcf407e3328254692f742a"}, - {file = "librosa-0.8.1.tar.gz", hash = "sha256:c53d05e768ae4a3e553ae21c2e5015293e5efbfd5c12d497f1104cb519cca6b3"}, -] - -[package.dependencies] -audioread = ">=2.0.0" -decorator = ">=3.0.0" -joblib = ">=0.14" -numba = ">=0.43.0" -numpy = ">=1.15.0" -packaging = ">=20.0" -pooch = ">=1.0" -resampy = ">=0.2.2" -scikit-learn = ">=0.14.0,<0.19.0 || >0.19.0" -scipy = ">=1.0.0" -soundfile = ">=0.10.2" - -[package.extras] -display = ["matplotlib (>=1.5)"] -docs = ["matplotlib (>=2.0.0,<3.3)", "numba (<0.50)", "numpydoc", "presets", "sphinx (!=1.3.1)", "sphinx-gallery (>=0.7)", "sphinx-multiversion (>=0.2.3)", "sphinx-rtd-theme (==0.5.*)", "spinxcontrib-svg2pdfconverter"] -tests = ["contextlib2", "matplotlib (>=3.0)", "pytest", "pytest-cov", "pytest-mpl", "samplerate", "soxr"] - -[[package]] -name = "llvmlite" -version = "0.43.0" -description = "lightweight wrapper around basic LLVM functionality" -optional = false -python-versions = ">=3.9" -files = [ - {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, - {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, - {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, - {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, - {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, - {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, - {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, - {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, - {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, - {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, - {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, - {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, - {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, - {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, - {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, - {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, - {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, - {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, - {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, - {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, - {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, -] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "mkl" -version = "2021.4.0" -description = "Intel® oneAPI Math Kernel Library" -optional = false -python-versions = "*" -files = [ - {file = "mkl-2021.4.0-py2.py3-none-macosx_10_15_x86_64.macosx_11_0_x86_64.whl", hash = "sha256:67460f5cd7e30e405b54d70d1ed3ca78118370b65f7327d495e9c8847705e2fb"}, - {file = "mkl-2021.4.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:636d07d90e68ccc9630c654d47ce9fdeb036bb46e2b193b3a9ac8cfea683cce5"}, - {file = "mkl-2021.4.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:398dbf2b0d12acaf54117a5210e8f191827f373d362d796091d161f610c1ebfb"}, - {file = "mkl-2021.4.0-py2.py3-none-win32.whl", hash = "sha256:439c640b269a5668134e3dcbcea4350459c4a8bc46469669b2d67e07e3d330e8"}, - {file = "mkl-2021.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:ceef3cafce4c009dd25f65d7ad0d833a0fbadc3d8903991ec92351fe5de1e718"}, -] - -[package.dependencies] -intel-openmp = "==2021.*" -tbb = "==2021.*" - -[[package]] -name = "moviepy" -version = "1.0.3" -description = "Video editing with Python" -optional = false -python-versions = "*" -files = [ - {file = "moviepy-1.0.3.tar.gz", hash = 
"sha256:2884e35d1788077db3ff89e763c5ba7bfddbd7ae9108c9bc809e7ba58fa433f5"}, -] - -[package.dependencies] -decorator = ">=4.0.2,<5.0" -imageio = {version = ">=2.5,<3.0", markers = "python_version >= \"3.4\""} -imageio_ffmpeg = {version = ">=0.2.0", markers = "python_version >= \"3.4\""} -numpy = {version = ">=1.17.3", markers = "python_version > \"2.7\""} -proglog = "<=1.0.0" -requests = ">=2.8.1,<3.0" -tqdm = ">=4.11.2,<5.0" - -[package.extras] -doc = ["Sphinx (>=1.5.2,<2.0)", "numpydoc (>=0.6.0,<1.0)", "pygame (>=1.9.3,<2.0)", "sphinx_rtd_theme (>=0.1.10b0,<1.0)"] -optional = ["matplotlib (>=2.0.0,<3.0)", "opencv-python (>=3.0,<4.0)", "scikit-image (>=0.13.0,<1.0)", "scikit-learn", "scipy (>=0.19.0,<1.5)", "youtube_dl"] -test = ["coverage (<5.0)", "coveralls (>=1.1,<2.0)", "pytest (>=3.0.0,<4.0)", "pytest-cov (>=2.5.1,<3.0)", "requests (>=2.8.1,<3.0)"] - -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - 
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = 
"multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "networkx" -version = "3.3" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.10" -files = [ - {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, - {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, -] - -[package.extras] -default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "numba" -version = "0.60.0" -description = "compiling Python code using LLVM" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, - {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, - {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, - {file 
= "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, - {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, - {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, - {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, - {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, - {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, - {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, - {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, - {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, - {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, - {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, - {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, -] - -[package.dependencies] -llvmlite = "==0.43.*" -numpy = ">=1.22,<2.1" - -[[package]] -name = "numpy" -version = "1.22.4" -description = "NumPy is the fundamental package for array computing with Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.22.4-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9ead61dfb5d971d77b6c131a9dbee62294a932bf6a356e48c75ae684e635b3"}, - {file = "numpy-1.22.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1ce7ab2053e36c0a71e7a13a7475bd3b1f54750b4b433adc96313e127b870887"}, - {file = "numpy-1.22.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7228ad13744f63575b3a972d7ee4fd61815b2879998e70930d4ccf9ec721dce0"}, - {file = "numpy-1.22.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43a8ca7391b626b4c4fe20aefe79fec683279e31e7c79716863b4b25021e0e74"}, - {file = "numpy-1.22.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a911e317e8c826ea632205e63ed8507e0dc877dcdc49744584dfc363df9ca08c"}, - {file = "numpy-1.22.4-cp310-cp310-win32.whl", hash = "sha256:9ce7df0abeabe7fbd8ccbf343dc0db72f68549856b863ae3dd580255d009648e"}, - {file = "numpy-1.22.4-cp310-cp310-win_amd64.whl", hash = "sha256:3e1ffa4748168e1cc8d3cde93f006fe92b5421396221a02f2274aab6ac83b077"}, - {file = "numpy-1.22.4-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:59d55e634968b8f77d3fd674a3cf0b96e85147cd6556ec64ade018f27e9479e1"}, - {file = "numpy-1.22.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c1d937820db6e43bec43e8d016b9b3165dcb42892ea9f106c70fb13d430ffe72"}, - {file = "numpy-1.22.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4c5d5eb2ec8da0b4f50c9a843393971f31f1d60be87e0fb0917a49133d257d6"}, - {file = "numpy-1.22.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64f56fc53a2d18b1924abd15745e30d82a5782b2cab3429aceecc6875bd5add0"}, - {file = "numpy-1.22.4-cp38-cp38-win32.whl", hash = "sha256:fb7a980c81dd932381f8228a426df8aeb70d59bbcda2af075b627bbc50207cba"}, - {file = "numpy-1.22.4-cp38-cp38-win_amd64.whl", hash = "sha256:e96d7f3096a36c8754207ab89d4b3282ba7b49ea140e4973591852c77d09eb76"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:4c6036521f11a731ce0648f10c18ae66d7143865f19f7299943c985cdc95afb5"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b89bf9b94b3d624e7bb480344e91f68c1c6c75f026ed6755955117de00917a7c"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d487e06ecbf1dc2f18e7efce82ded4f705f4bd0cd02677ffccfb39e5c284c7e"}, - {file = "numpy-1.22.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3eb268dbd5cfaffd9448113539e44e2dd1c5ca9ce25576f7c04a5453edc26fa"}, - {file = "numpy-1.22.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37431a77ceb9307c28382c9773da9f306435135fae6b80b62a11c53cfedd8802"}, - {file = "numpy-1.22.4-cp39-cp39-win32.whl", hash = "sha256:cc7f00008eb7d3f2489fca6f334ec19ca63e31371be28fd5dad955b16ec285bd"}, - {file = "numpy-1.22.4-cp39-cp39-win_amd64.whl", hash = "sha256:f0725df166cf4785c0bc4cbfb320203182b1ecd30fee6e541c8752a92df6aa32"}, - {file = "numpy-1.22.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0791fbd1e43bf74b3502133207e378901272f3c156c4df4954cad833b1380207"}, - {file = "numpy-1.22.4.zip", hash = "sha256:425b390e4619f58d8526b3dcf656dde069133ae5c240229821f01b5f44ea07af"}, -] - -[[package]] -name = "nvidia-cublas-cu12" -version = "12.1.3.1" -description = "CUBLAS native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = 
"sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.1.105" -description = "CUDA profiling tools runtime libs." -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.1.105" -description = "NVRTC native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.1.105" -description = "CUDA Runtime native Libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "8.9.2.26" -description = "cuDNN runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.0.2.54" -description = "CUFFT native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, -] - -[[package]] -name = "nvidia-curand-cu12" -version = "10.3.2.106" -description = "CURAND native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.4.5.107" -description = "CUDA solver native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" 
-nvidia-cusparse-cu12 = "*" -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparse-cu12" -version = "12.1.0.106" -description = "CUSPARSE native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, -] - -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.20.5" -description = "NVIDIA Collective Communication Library (NCCL) Runtime" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.5.82" -description = "Nvidia JIT LTO Library" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f9b37bc5c8cf7509665cb6ada5aaa0ce65618f2332b7d3e78e9790511f111212"}, - {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-win_amd64.whl", hash = "sha256:e782564d705ff0bf61ac3e1bf730166da66dd2fe9012f111ede5fc49b64ae697"}, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.1.105" -description = "NVIDIA Tools Extension" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, -] - -[[package]] -name = "nvidia-pytriton" -version = "0.4.2" -description = "PyTriton - Flask/FastAPI-like interface to simplify Triton's deployment in Python environments." 
-optional = false -python-versions = "<4,>=3.8" -files = [ - {file = "nvidia_pytriton-0.4.2-py3-none-manylinux_2_35_aarch64.whl", hash = "sha256:0d204fa71774c21768c8f985d98aa55d80945fa7066b189e8885035517635956"}, - {file = "nvidia_pytriton-0.4.2-py3-none-manylinux_2_35_x86_64.whl", hash = "sha256:8f79e8bda28961a49d5c64474c17664e10b1bb08018b7ee7bcbd81bcff86e266"}, -] - -[package.dependencies] -numpy = ">=1.21,<2.0" -protobuf = ">=3.7.0" -pyzmq = ">=23.0,<24.0" -sh = ">=1.14,<2.0" -tritonclient = {version = ">=2.39,<3.0", extras = ["all"]} -typing-inspect = ">=0.6.0,<0.7.0" -wrapt = ">=1.11.0" - -[package.extras] -dev = ["black (>=22.8)", "build (>=0.8,<1.0.0)", "ipython (>=7.16)", "isort (>=5.10)", "nvidia-pytriton[doc]", "nvidia-pytriton[test]", "pip (>=21.3)", "pudb (>=2022.1.3)", "twine (>=4.0)"] -doc = ["GitPython (>=3.1.30)", "mike (>=2.0.0)", "mkdocs-htmlproofer-plugin (>=0.8.0)", "mkdocs-material (>=8.5.6)", "mkdocstrings[python] (>=0.19.0)"] -test = ["alt-pytest-asyncio (>=0.7,<1.0)", "pre-commit (>=2.20.0)", "psutil (>=5.1,<6.0)", "py-spy (>=0.3,<1.0)", "pytest (>=7.2,<8.0)", "pytest-codeblocks (>=0.16,<1.0)", "pytest-mock (>=3.8,<4.0)", "pytest-timeout (>=2.1,<3.0)", "pytype (!=2021.11.18,!=2022.2.17)", "tox (>=3.23.1)", "tqdm (>=4.64.1)"] - -[[package]] -name = "opencv-python" -version = "4.7.0.72" -description = "Wrapper package for OpenCV python bindings." -optional = false -python-versions = ">=3.6" -files = [ - {file = "opencv-python-4.7.0.72.tar.gz", hash = "sha256:3424794a711f33284581f3c1e4b071cfc827d02b99d6fd9a35391f517c453306"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:d4f8880440c433a0025d78804dda6901d1e8e541a561dda66892d90290aef881"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:7a297e7651e22eb17c265ddbbc80e2ba2a8ff4f4a1696a67c45e5f5798245842"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd08343654c6b88c5a8c25bf425f8025aed2e3189b4d7306b5861d32affaf737"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebfc0a3a2f57716e709028b992e4de7fd8752105d7a768531c4f434043c6f9ff"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-win32.whl", hash = "sha256:eda115797b114fc16ca6f182b91c5d984f0015c19bec3145e55d33d708e9bae1"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-win_amd64.whl", hash = "sha256:812af57553ec1c6709060c63f6b7e9ad07ddc0f592f3ccc6d00c71e0fe0e6376"}, -] - -[package.dependencies] -numpy = {version = ">=1.22.0", markers = "python_version >= \"3.11\""} - -[[package]] -name = "opencv-python" -version = "4.10.0.84" -description = "Wrapper package for OpenCV python bindings." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "opencv-python-4.10.0.84.tar.gz", hash = "sha256:72d234e4582e9658ffea8e9cae5b63d488ad06994ef12d81dc303b17472f3526"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:fc182f8f4cda51b45f01c64e4cbedfc2f00aff799debebc305d8d0210c43f251"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:71e575744f1d23f79741450254660442785f45a0797212852ee5199ef12eed98"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09a332b50488e2dda866a6c5573ee192fe3583239fb26ff2f7f9ceb0bc119ea6"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ace140fc6d647fbe1c692bcb2abce768973491222c067c131d80957c595b71f"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-win32.whl", hash = "sha256:2db02bb7e50b703f0a2d50c50ced72e95c574e1e5a0bb35a8a86d0b35c98c236"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-win_amd64.whl", hash = "sha256:32dbbd94c26f611dc5cc6979e6b7aa1f55a64d6b463cc1dcd3c95505a63e48fe"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""}, - {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandas" -version = "1.3.5" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "pandas-1.3.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:62d5b5ce965bae78f12c1c0df0d387899dd4211ec0bdc52822373f13a3a022b9"}, - {file = "pandas-1.3.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adfeb11be2d54f275142c8ba9bf67acee771b7186a5745249c7d5a06c670136b"}, - {file = "pandas-1.3.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a8c055d58873ad81cae290d974d13dd479b82cbb975c3e1fa2cf1920715296"}, - {file = "pandas-1.3.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd541ab09e1f80a2a1760032d665f6e032d8e44055d602d65eeea6e6e85498cb"}, - {file = "pandas-1.3.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2651d75b9a167cc8cc572cf787ab512d16e316ae00ba81874b560586fa1325e0"}, - {file = "pandas-1.3.5-cp310-cp310-win_amd64.whl", hash = "sha256:aaf183a615ad790801fa3cf2fa450e5b6d23a54684fe386f7e3208f8b9bfbef6"}, - {file = "pandas-1.3.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:344295811e67f8200de2390093aeb3c8309f5648951b684d8db7eee7d1c81fb7"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:552020bf83b7f9033b57cbae65589c01e7ef1544416122da0c79140c93288f56"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cce0c6bbeb266b0e39e35176ee615ce3585233092f685b6a82362523e59e5b4"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7d28a3c65463fd0d0ba8bbb7696b23073efee0510783340a44b08f5e96ffce0c"}, - {file = "pandas-1.3.5-cp37-cp37m-win32.whl", hash = "sha256:a62949c626dd0ef7de11de34b44c6475db76995c2064e2d99c6498c3dba7fe58"}, - {file = "pandas-1.3.5-cp37-cp37m-win_amd64.whl", hash = "sha256:8025750767e138320b15ca16d70d5cdc1886e8f9cc56652d89735c016cd8aea6"}, - {file = "pandas-1.3.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fe95bae4e2d579812865db2212bb733144e34d0c6785c0685329e5b60fcb85dd"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f261553a1e9c65b7a310302b9dbac31cf0049a51695c14ebe04e4bfd4a96f02"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6dbec5f3e6d5dc80dcfee250e0a2a652b3f28663492f7dab9a24416a48ac39"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3bc49af96cd6285030a64779de5b3688633a07eb75c124b0747134a63f4c05f"}, - {file = "pandas-1.3.5-cp38-cp38-win32.whl", hash = "sha256:b6b87b2fb39e6383ca28e2829cddef1d9fc9e27e55ad91ca9c435572cdba51bf"}, - {file = "pandas-1.3.5-cp38-cp38-win_amd64.whl", hash = "sha256:a395692046fd8ce1edb4c6295c35184ae0c2bbe787ecbe384251da609e27edcb"}, - {file = "pandas-1.3.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd971a3f08b745a75a86c00b97f3007c2ea175951286cdda6abe543e687e5f2f"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37f06b59e5bc05711a518aa10beaec10942188dccb48918bb5ae602ccbc9f1a0"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c21778a688d3712d35710501f8001cdbf96eb70a7c587a3d5613573299fdca6"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3345343206546545bc26a05b4602b6a24385b5ec7c75cb6059599e3d56831da2"}, - {file = "pandas-1.3.5-cp39-cp39-win32.whl", hash = "sha256:c69406a2808ba6cf580c2255bcf260b3f214d2664a3a4197d0e640f573b46fd3"}, - {file = "pandas-1.3.5-cp39-cp39-win_amd64.whl", hash = "sha256:32e1a26d5ade11b547721a72f9bfc4bd113396947606e00d5b4a5b79b3dcb006"}, - {file = "pandas-1.3.5.tar.gz", hash = "sha256:1e4285f5de1012de20ca46b188ccf33521bff61ba5c5ebd78b4fb28e5416a9f1"}, -] - -[package.dependencies] -numpy = {version = ">=1.21.0", markers = "python_version >= \"3.10\""} -python-dateutil = ">=2.7.3" -pytz = ">=2017.3" - -[package.extras] -test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"] - -[[package]] -name = "pillow" -version = "9.1.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Pillow-9.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:af79d3fde1fc2e33561166d62e3b63f0cc3e47b5a3a2e5fea40d4917754734ea"}, - {file = "Pillow-9.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:55dd1cf09a1fd7c7b78425967aacae9b0d70125f7d3ab973fadc7b5abc3de652"}, - {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66822d01e82506a19407d1afc104c3fcea3b81d5eb11485e593ad6b8492f995a"}, - {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5eaf3b42df2bcda61c53a742ee2c6e63f777d0e085bbc6b2ab7ed57deb13db7"}, - {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01ce45deec9df310cbbee11104bae1a2a43308dd9c317f99235b6d3080ddd66e"}, - {file = 
"Pillow-9.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aea7ce61328e15943d7b9eaca87e81f7c62ff90f669116f857262e9da4057ba3"}, - {file = "Pillow-9.1.0-cp310-cp310-win32.whl", hash = "sha256:7a053bd4d65a3294b153bdd7724dce864a1d548416a5ef61f6d03bf149205160"}, - {file = "Pillow-9.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:97bda660702a856c2c9e12ec26fc6d187631ddfd896ff685814ab21ef0597033"}, - {file = "Pillow-9.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21dee8466b42912335151d24c1665fcf44dc2ee47e021d233a40c3ca5adae59c"}, - {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b6d4050b208c8ff886fd3db6690bf04f9a48749d78b41b7a5bf24c236ab0165"}, - {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cfca31ab4c13552a0f354c87fbd7f162a4fafd25e6b521bba93a57fe6a3700a"}, - {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed742214068efa95e9844c2d9129e209ed63f61baa4d54dbf4cf8b5e2d30ccf2"}, - {file = "Pillow-9.1.0-cp37-cp37m-win32.whl", hash = "sha256:c9efef876c21788366ea1f50ecb39d5d6f65febe25ad1d4c0b8dff98843ac244"}, - {file = "Pillow-9.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:de344bcf6e2463bb25179d74d6e7989e375f906bcec8cb86edb8b12acbc7dfef"}, - {file = "Pillow-9.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:17869489de2fce6c36690a0c721bd3db176194af5f39249c1ac56d0bb0fcc512"}, - {file = "Pillow-9.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:25023a6209a4d7c42154073144608c9a71d3512b648a2f5d4465182cb93d3477"}, - {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8782189c796eff29dbb37dd87afa4ad4d40fc90b2742704f94812851b725964b"}, - {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:463acf531f5d0925ca55904fa668bb3461c3ef6bc779e1d6d8a488092bdee378"}, - {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f42364485bfdab19c1373b5cd62f7c5ab7cc052e19644862ec8f15bb8af289e"}, - {file = "Pillow-9.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3fddcdb619ba04491e8f771636583a7cc5a5051cd193ff1aa1ee8616d2a692c5"}, - {file = "Pillow-9.1.0-cp38-cp38-win32.whl", hash = "sha256:4fe29a070de394e449fd88ebe1624d1e2d7ddeed4c12e0b31624561b58948d9a"}, - {file = "Pillow-9.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c24f718f9dd73bb2b31a6201e6db5ea4a61fdd1d1c200f43ee585fc6dcd21b34"}, - {file = "Pillow-9.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fb89397013cf302f282f0fc998bb7abf11d49dcff72c8ecb320f76ea6e2c5717"}, - {file = "Pillow-9.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c870193cce4b76713a2b29be5d8327c8ccbe0d4a49bc22968aa1e680930f5581"}, - {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69e5ddc609230d4408277af135c5b5c8fe7a54b2bdb8ad7c5100b86b3aab04c6"}, - {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35be4a9f65441d9982240e6966c1eaa1c654c4e5e931eaf580130409e31804d4"}, - {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82283af99c1c3a5ba1da44c67296d5aad19f11c535b551a5ae55328a317ce331"}, - {file = "Pillow-9.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a325ac71914c5c043fa50441b36606e64a10cd262de12f7a179620f579752ff8"}, - {file = "Pillow-9.1.0-cp39-cp39-win32.whl", hash = "sha256:a598d8830f6ef5501002ae85c7dbfcd9c27cc4efc02a1989369303ba85573e58"}, - {file = 
"Pillow-9.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0c51cb9edac8a5abd069fd0758ac0a8bfe52c261ee0e330f363548aca6893595"}, - {file = "Pillow-9.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a336a4f74baf67e26f3acc4d61c913e378e931817cd1e2ef4dfb79d3e051b481"}, - {file = "Pillow-9.1.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb1b89b11256b5b6cad5e7593f9061ac4624f7651f7a8eb4dfa37caa1dfaa4d0"}, - {file = "Pillow-9.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:255c9d69754a4c90b0ee484967fc8818c7ff8311c6dddcc43a4340e10cd1636a"}, - {file = "Pillow-9.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5a3ecc026ea0e14d0ad7cd990ea7f48bfcb3eb4271034657dc9d06933c6629a7"}, - {file = "Pillow-9.1.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5b0ff59785d93b3437c3703e3c64c178aabada51dea2a7f2c5eccf1bcf565a3"}, - {file = "Pillow-9.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7110ec1701b0bf8df569a7592a196c9d07c764a0a74f65471ea56816f10e2c8"}, - {file = "Pillow-9.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8d79c6f468215d1a8415aa53d9868a6b40c4682165b8cb62a221b1baa47db458"}, - {file = "Pillow-9.1.0.tar.gz", hash = "sha256:f401ed2bbb155e1ade150ccc63db1a4f6c1909d3d378f7d1235a44e90d75fb97"}, -] - -[package.extras] -docs = ["olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinx-rtd-theme (>=1.0)", "sphinxext-opengraph"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pooch" -version = "1.8.2" -description = "A friend to fetch your data files" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pooch-1.8.2-py3-none-any.whl", hash = "sha256:3529a57096f7198778a5ceefd5ac3ef0e4d06a6ddaf9fc2d609b806f25302c47"}, - {file = "pooch-1.8.2.tar.gz", hash = "sha256:76561f0de68a01da4df6af38e9955c4c9d1a5c90da73f7e40276a5728ec83d10"}, -] - -[package.dependencies] -packaging = ">=20.0" -platformdirs = ">=2.5.0" -requests = ">=2.19.0" - -[package.extras] -progress = ["tqdm (>=4.41.0,<5.0.0)"] -sftp = ["paramiko (>=2.7.0)"] -xxhash = ["xxhash (>=1.4.3)"] - -[[package]] -name = "proglog" -version = "0.1.10" -description = "Log and progress bar manager for console, notebooks, web..." 
-optional = false -python-versions = "*" -files = [ - {file = "proglog-0.1.10-py3-none-any.whl", hash = "sha256:19d5da037e8c813da480b741e3fa71fb1ac0a5b02bf21c41577c7f327485ec50"}, - {file = "proglog-0.1.10.tar.gz", hash = "sha256:658c28c9c82e4caeb2f25f488fff9ceace22f8d69b15d0c1c86d64275e4ddab4"}, -] - -[package.dependencies] -tqdm = "*" - -[[package]] -name = "protobuf" -version = "5.27.2" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, - {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, - {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, - {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, - {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, - {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, - {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, - {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, - {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, -] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydub" -version = "0.25.1" -description = "Manipulate audio with an simple and easy high level interface" -optional = false -python-versions = "*" -files = [ - {file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"}, - {file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"}, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = 
"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-rapidjson" -version = "1.18" -description = "Python wrapper around rapidjson" -optional = false -python-versions = ">=3.6" -files = [ - {file = "python-rapidjson-1.18.tar.gz", hash = "sha256:09a5c362e2fec2a41b53e79e88bd8f0704447cb67c1c89a59e3092ccb4a30355"}, - {file = "python_rapidjson-1.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f38c7ca5fee31423bb34f464c789f57954886dd00e1a8c8483fd13e0c0d2583"}, - {file = "python_rapidjson-1.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1912224817f734ee9138e91d170b62818fd01caa731aa8668e8c9bce9017fe7e"}, - {file = "python_rapidjson-1.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2af6ca181e812f2306d4806beb974334ddd0774a8f62194ad1721277236f4ad1"}, - {file = "python_rapidjson-1.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08f859f64470ecb307cdcd7a532bef9c9ab3c94d2005c5693a7e18b3a11d4b28"}, - {file = "python_rapidjson-1.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:507595740300e95dded254536558cd56733cc3207e3c2457f19231ad00e78d85"}, - {file = "python_rapidjson-1.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5815af2f69a11c114e5004a77b8b036b5abcd06202c8bc1525856f9d836254a3"}, - {file = "python_rapidjson-1.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d680b8c8f4dbceb465544bbdd28463aa7e0b651343aa73c2476533bf300e0266"}, - {file = "python_rapidjson-1.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ff22c4160227be38322a88856f011c95d199103c30993bf3ee64f4bce9221807"}, - {file = "python_rapidjson-1.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:9deb8a8a2df2982b446f2a19264a5da2780ddb415caf9e11d48e74701053f02e"}, - {file = "python_rapidjson-1.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6ecd86abf689538fdab5a55483c38bf10bdd9a8ed204ae10fa5a1bac7222d88"}, - {file = "python_rapidjson-1.18-cp310-cp310-win32.whl", hash = "sha256:a9d4cd0be643b8310c1c92987961c06b68429527154e9bea75118802cd179178"}, - {file = "python_rapidjson-1.18-cp310-cp310-win_amd64.whl", hash = "sha256:52f1d509ec20ab5d26f6dbc5d56821e0b2b1a5a878439eb0b3a33137b59029f5"}, - {file = "python_rapidjson-1.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:83912aae7c508204c263818befa24cf3223ecf0175e70d0412169e1302f1b4f2"}, - {file = "python_rapidjson-1.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0f45a02e4593879772099cf88d18dbde3376334684a809feb9228b8745c0c08c"}, - {file = "python_rapidjson-1.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f201e0c1e41c0e491cf2eca121d51f30c666f35ce33a6d14ba8fc5b76e4a2fa7"}, - {file = "python_rapidjson-1.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:697d06a66a1ba267f5abbb04971e461df1d4528ba341af6848a1ef01ae224e90"}, - {file = "python_rapidjson-1.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7e22b841fda1ec8c9e0a49069fbc6579363ba79fa5398fc7d37666357068cf"}, - {file = "python_rapidjson-1.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:356b2f81e6cdb4c1bb9122b635c8bd827f845da7c0de8618874c933fb88de573"}, 
- {file = "python_rapidjson-1.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:acd2430dd7a8f66618247635c51a9413679e9a5279aaea708f854ef03cc933e1"}, - {file = "python_rapidjson-1.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a587b3ec2b76480dfb57409654a9344ab47910e1b9d09e1c8eefe2db6c8c7364"}, - {file = "python_rapidjson-1.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2cf502e6c01d0933dc65888ab62b86d67967903c9a66158c2e458b312e671345"}, - {file = "python_rapidjson-1.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43e622aa170f0b1e04f5b5ac8c7bf94b99f79efceb3608d8f1456f617cd79cdb"}, - {file = "python_rapidjson-1.18-cp311-cp311-win32.whl", hash = "sha256:f9c9faa7c1df63e2b238fcbdb915d52eba9ba42ee6e2a502f81e8aac07938783"}, - {file = "python_rapidjson-1.18-cp311-cp311-win_amd64.whl", hash = "sha256:e7b1cadf5c8852ae6e0a19fcf5b734eef4f92170292686cfdcced1302ea0aa20"}, - {file = "python_rapidjson-1.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52912323a2ac460ea605ab55f437196f662ec9db82669367dab4cda8f4c05b13"}, - {file = "python_rapidjson-1.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ebbd471d63bfa3c09180fd44eefec7b0f46ca41ee4552559c3a027799c67d781"}, - {file = "python_rapidjson-1.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb89a794242a692ef5d15ec9ad14c21fd17abc4671af62eadc8e6a1861a0319"}, - {file = "python_rapidjson-1.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcae2fdd5a2520dc85f98224ba1fc96badd0b68d3a8ee41485b3e37be67b7bef"}, - {file = "python_rapidjson-1.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f46face2b3e6891dd646dc1062c1133323ce4dc26409a084535f2af9e2bb4e3"}, - {file = "python_rapidjson-1.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67a3f71225200344ffaab3d28add533398b92f65d9166e649222a50677370fd2"}, - {file = "python_rapidjson-1.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7851252083aba29668cf1f02dc1c1e5e5a9113bf4f1dedc2f509c00e43f0c884"}, - {file = "python_rapidjson-1.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:32c32256edb35a234b16dfa6452bdf066cc272675cf9b3eb980e853505202766"}, - {file = "python_rapidjson-1.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5f9d27c090782f83de06dd51b9a7143b04c32314e53ed531a2d8f170f9f255e9"}, - {file = "python_rapidjson-1.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d3e0b8863cc0e78e36d41aae856101291c0bea9215690decafa6bae5f413e1f3"}, - {file = "python_rapidjson-1.18-cp312-cp312-win32.whl", hash = "sha256:123e7bf9726c09055d97ba0c4fc8cdb9deda80c2a9d5409bfd49935a0f38d0b2"}, - {file = "python_rapidjson-1.18-cp312-cp312-win_amd64.whl", hash = "sha256:03d14892a1cdc24e5b200ca619fda397e0f36a3d1420edcb7212ae47d4d9fd3e"}, - {file = "python_rapidjson-1.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1d4861adede630a5eee77c46f9c901da2ac15bc3c0296ad851d69036db3a0374"}, - {file = "python_rapidjson-1.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:35d0e9c8dd61670b5833546b3ded057b68e696ab530d3c14603e718a4bc3db00"}, - {file = "python_rapidjson-1.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d229114f738ee0d9ff1b727aaf7bfe6a90d6f77e0449b33f87ad7814c493c921"}, - {file = "python_rapidjson-1.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb0a8361b668e920d7fa78f725f59d224adedb3620f526509cef4416778e3393"}, - {file = 
"python_rapidjson-1.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20256271a00f758a96ccfdd61434c11a1fc6b5e3fd4e7324dd832e576c9f720b"}, - {file = "python_rapidjson-1.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad82fa706d7437ceb0d8e36870715e8318359bc604016fc505c14ccc109322e9"}, - {file = "python_rapidjson-1.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f13a8be00c0fd31c75304f03df1240d16268720b9d12eca3d055f702dd607427"}, - {file = "python_rapidjson-1.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e9712964a864c7604319bebbdd4ab5de9a42698d3c9a6c15c964a06d586a2c66"}, - {file = "python_rapidjson-1.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0f36f9c194d8c893463128a57bd7cde3bb28151eaf5bb5db5f552de0eb0eb93"}, - {file = "python_rapidjson-1.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4704f9f987a62c4b7e419843bb3c5daea81271dba95cae47e92b2475978ae66b"}, - {file = "python_rapidjson-1.18-cp313-cp313-win32.whl", hash = "sha256:2d197c686a4eacc2defe9bd31bf73b23877ad4974857b72b65e126cef7a50fa5"}, - {file = "python_rapidjson-1.18-cp313-cp313-win_amd64.whl", hash = "sha256:30f4a317af410d3977cf405737a2d6e81c6695d24df33113523023f665bb5e75"}, - {file = "python_rapidjson-1.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:300b8d84d5bebea7988312950fc949c1701055086b2790afaaad68e8f1cf389d"}, - {file = "python_rapidjson-1.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:981dd50522999a4fe519ca14135e20b3acc4928df4d4421d96792913d2fb359d"}, - {file = "python_rapidjson-1.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d03390ac612090c58553e1d8454faff6099a2b2ee0c44ebd19546d5a73b30689"}, - {file = "python_rapidjson-1.18-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0550caca5227e3f929b63b758c19c584f39c10d4e1c4ad9b7e322f19030db3b8"}, - {file = "python_rapidjson-1.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37295c26b6270515666243d499c060006471b0517dbdf7690b5f855b9531f9b8"}, - {file = "python_rapidjson-1.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d058b9c740c55fe3ffab826742773f995620992eda6a31d794727526d0ea1610"}, - {file = "python_rapidjson-1.18-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0c4697e3fa587c7f3938d2394ff6563085bbf346e4cab29fb425595d267a59d1"}, - {file = "python_rapidjson-1.18-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aa8fbc9c31d9320e80a290d3cf847756d37290628ccaad3719de6fa51ab43597"}, - {file = "python_rapidjson-1.18-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:191e051b7b384474b6558902b8c33f82474492e3d19cc188224cd1a5584ca4bf"}, - {file = "python_rapidjson-1.18-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dd0bc1b3d4d72bd3eb9f60f84473fcefb316912422267bf06d8c2290ef33e02"}, - {file = "python_rapidjson-1.18-cp38-cp38-win32.whl", hash = "sha256:1925a3ed72504812ab1d8edd59ad83bd4b96b5a3e149ee927f3cdb98b803ac22"}, - {file = "python_rapidjson-1.18-cp38-cp38-win_amd64.whl", hash = "sha256:4e21cbd8585598ce091990196fe6fe354c607e13e2b17794f3711a8f2b2b8b11"}, - {file = "python_rapidjson-1.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:68230f34a076a54298d5c860ae8aa08e3de5ab5a289b23b96a0a6039861f911b"}, - {file = "python_rapidjson-1.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b1ec8b167484523bc0d753998594cb2614061755191946b73c7e88e124287595"}, - {file = 
"python_rapidjson-1.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bad5d1a46b2d07f1d9b4ad1c316a36e024da451ff876d1572cb345c6bb50a42"}, - {file = "python_rapidjson-1.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daf270f1d2feddf7680ddc2faf2778e814caf569095cc60c2079e856af3d2bc3"}, - {file = "python_rapidjson-1.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72948a56b9d4964d72f2f3862d5d168b247457f9d1e70cee750a0cd660f67555"}, - {file = "python_rapidjson-1.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0624eebe2ceba44dd84a3d3409fab1e7e1a021c3701b5ad5bd8a0fba47898d20"}, - {file = "python_rapidjson-1.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b975fcecbf5f3845ce72040be4630ece4c5b467c24c749be2a81827918a2e530"}, - {file = "python_rapidjson-1.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f725f560f4865fb5b684a26935f78690e44aa475c8b41a793d096a122115c9b3"}, - {file = "python_rapidjson-1.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0a31ea1a7a11a6e60fed42364e6726d29346f6ba1a9212ea1b6753731f600909"}, - {file = "python_rapidjson-1.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:477aff79a2d87daee45c49e917097426fe5495f99fb935a5adb20716cb52c86a"}, - {file = "python_rapidjson-1.18-cp39-cp39-win32.whl", hash = "sha256:d13a0e3f647726f653cd3d6bfc770d595f51d75212b38df82d2a465bc0df5dd8"}, - {file = "python_rapidjson-1.18-cp39-cp39-win_amd64.whl", hash = "sha256:412c716cbf41ecfb99879443fc11288513053e63302232df0ed99d629fd220da"}, -] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywavelets" -version = "1.6.0" -description = "PyWavelets, wavelet transform module" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pywavelets-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ddc1ff5ad706313d930f857f9656f565dfb81b85bbe58a9db16ad8fa7d1537c5"}, - {file = "pywavelets-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:78feab4e0c25fa32034b6b64cb854c6ce15663b4f0ffb25d8f0ee58915300f9b"}, - {file = "pywavelets-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be36f08efe9bc3abf40cf40cd2ee0aa0db26e4894e13ce5ac178442864161e8c"}, - {file = "pywavelets-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0595c51472c9c5724fe087cb73e2797053fd25c788d6553fdad6ff61abc60e91"}, - {file = "pywavelets-1.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:058a750477dde633ac53b8806f835af3559d52db6532fb2b93c1f4b5441365b8"}, - {file = "pywavelets-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:538795d9c4181152b414285b5a7f72ac52581ecdcdce74b6cca3fa0b8a5ab0aa"}, - {file = "pywavelets-1.6.0-cp310-cp310-win32.whl", hash = "sha256:47de024ba4f9df97e98b5f540340e1a9edd82d2c477450bef8c9b5381487128e"}, - {file = "pywavelets-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e2c44760c0906ddf2176920a2613287f6eea947f166ce7eee9546081b06a6835"}, - {file = "pywavelets-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:d91aaaf6de53b758bcdc96c81cdb5a8607758602be49f691188c0e108cf1e738"}, - {file = "pywavelets-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b5302edb6d1d1ff6636d37c9ff29c4892f2a3648d736cc1df01f3f36e25c8cf"}, - {file = "pywavelets-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e655446e37a3c87213d5c6386b86f65c4d61736b4432d720171e7dd6523d6a"}, - {file = "pywavelets-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ec7d69b746a0eaa327b829a3252a63619f2345e263177be5dd9bf30d7933c8d"}, - {file = "pywavelets-1.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97ea9613bd6b7108ebb44b709060adc7e2d5fac73be7152342bdd5513d75f84e"}, - {file = "pywavelets-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:48b3813c6d1a7a8194f37dbb5dbbdf2fe1112152c91445ea2e54f64ff6350c36"}, - {file = "pywavelets-1.6.0-cp311-cp311-win32.whl", hash = "sha256:4ffb484d096a5eb10af7121e0203546a03e1369328df321a33ef91f67bac40cf"}, - {file = "pywavelets-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:274bc47b289585383aa65519b3fcae5b4dee5e31db3d4198d4fad701a70e59f7"}, - {file = "pywavelets-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6ec113386a432e04103f95e351d2657b42145bd1e1ed26513423391bcb5f011"}, - {file = "pywavelets-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab652112d3932d21f020e281e06926a751354c2b5629fb716f5eb9d0104b84e5"}, - {file = "pywavelets-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47b0314a22616c5f3f08760f0e00b4a15b7c7dadca5e39bb701cf7869a4207c5"}, - {file = "pywavelets-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138471513bc0a4cd2ddc4e50c7ec04e3468c268e101a0d02f698f6aedd1d5e79"}, - {file = "pywavelets-1.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67936491ae3e5f957c428e34fdaed21f131535b8d60c7c729a1b539ce8864837"}, - {file = "pywavelets-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd798cee3d28fb3d32a26a00d9831a20bf316c36d685e4ced01b4e4a8f36f5ce"}, - {file = "pywavelets-1.6.0-cp312-cp312-win32.whl", hash = "sha256:e772f7f0c16bfc3be8ac3cd10d29a9920bb7a39781358856223c491b899e6e79"}, - {file = "pywavelets-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:4ef15a63a72afa67ae9f4f3b06c95c5382730fb3075e668d49a880e65f2f089c"}, - {file = "pywavelets-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:627df378e63e9c789b6f2e7060cb4264ebae6f6b0efc1da287a2c060de454a1f"}, - {file = "pywavelets-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a413b51dc19e05243fe0b0864a8e8a16b5ca9bf2e4713da00a95b1b5747a5367"}, - {file = "pywavelets-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be615c6c1873e189c265d4a76d1751ec49b17e29725e6dd2e9c74f1868f590b7"}, - {file = "pywavelets-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4021ef69ec9f3862f66580fc4417be728bd78722914394594b48212fd1fcaf21"}, - {file = "pywavelets-1.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8fbf7b61b28b5457693c034e58a01622756d1fd60a80ae13ac5888b1d3e57e80"}, - {file = "pywavelets-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f58ddbb0a6cd243928876edfc463b990763a24fb94498607d6fea690e32cca4c"}, - {file = "pywavelets-1.6.0-cp39-cp39-win32.whl", hash = "sha256:42a22e68e345b6de7d387ef752111ab4530c98048d2b4bdac8ceefb078b4ead6"}, - {file = "pywavelets-1.6.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:32198de321892743c1a3d1957fe1cd8a8ecc078bfbba6b8f3982518e897271d7"}, - {file = "pywavelets-1.6.0.tar.gz", hash = "sha256:ea027c70977122c5fc27b2510f0a0d9528f9c3df6ea3e4c577ca55fd00325a5b"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<3" - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "pyzmq" -version = "23.2.1" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyzmq-23.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:a3fd44b5046d247e7f0f1660bcafe7b5fb0db55d0934c05dd57dda9e1f823ce7"}, - {file = "pyzmq-23.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2141e6798d5981be04c08996d27962086a1aa3ea536fe9cf7e89817fd4523f86"}, - {file = "pyzmq-23.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a39ddb0431a68954bd318b923230fa5b649c9c62b0e8340388820c5f1b15bd2"}, - {file = "pyzmq-23.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e06747014a5ad1b28cebf5bc1ddcdaccfb44e9b441d35e6feb1286c8a72e54be"}, - {file = "pyzmq-23.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0113d70b095339e99bb522fe7294f5ae6a7f3b2b8f52f659469a74b5cc7661"}, - {file = "pyzmq-23.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:71b32a1e827bdcbf73750e60370d3b07685816ff3d8695f450f0f8c3226503f8"}, - {file = "pyzmq-23.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:55568a020ad2cae9ae36da6058e7ca332a56df968f601cbdb7cf6efb2a77579a"}, - {file = "pyzmq-23.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c02a0cd39dc01659b3d6cb70bb3a41aebd9885fd78239acdd8d9c91351c4568"}, - {file = "pyzmq-23.2.1-cp310-cp310-win32.whl", hash = "sha256:e1fe30bcd5aea5948c42685fad910cd285eacb2518ea4dc6c170d6b535bee95d"}, - {file = "pyzmq-23.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:650389bbfca73955b262b2230423d89992f38ec48033307ae80e700eaa2fbb63"}, - {file = "pyzmq-23.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e753eee6d3b93c5354e8ba0a1d62956ee49355f0a36e00570823ef64e66183f5"}, - {file = "pyzmq-23.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f07016e3cf088dbfc6e7c5a7b3f540db5c23b0190d539e4fd3e2b5e6beffa4b5"}, - {file = "pyzmq-23.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4805af9614b0b41b7e57d17673459facf85604dac502a5a9244f6e8c9a4de658"}, - {file = "pyzmq-23.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39dd252b683816935702825e5bf775df16090619ced9bb4ba68c2d0b6f0c9b18"}, - {file = "pyzmq-23.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:84678153432241bcdca2210cf4ff83560b200556867aea913ffbb960f5d5f340"}, - {file = "pyzmq-23.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:90d88f9d9a2ae6cfb1dc4ea2d1710cdf6456bc1b9a06dd1bb485c5d298f2517e"}, - {file = "pyzmq-23.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:794871988c34727c7f79bdfe2546e6854ae1fa2e1feb382784f23a9c6c63ecb3"}, - {file = "pyzmq-23.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c56b1a62a1fb87565343c57b6743fd5da6e138b8c6562361d7d9b5ce4acf399a"}, - {file = "pyzmq-23.2.1-cp311-cp311-win32.whl", hash = "sha256:c3ebf1668664d20c8f7d468955f18379b7d1f7bc8946b13243d050fa3888c7ff"}, - {file = "pyzmq-23.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:ec9803aca9491fd6f0d853d2a6147f19f8deaaa23b1b713d05c5d09e56ea7142"}, - {file = "pyzmq-23.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:385609812eafd9970c3752c51f2f6c4f224807e3e441bcfd8c8273877d00c8a8"}, - {file = "pyzmq-23.2.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b861db65f6b8906c8d6db51dde2448f266f0c66bf28db2c37aea50f58a849859"}, - {file = "pyzmq-23.2.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b1e79bba24f6df1712e3188d5c32c480d8eda03e8ecff44dc8ecb0805fa62f3"}, - {file = "pyzmq-23.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8dc66f109a245653b19df0f44a5af7a3f14cb8ad6c780ead506158a057bd36ce"}, - {file = "pyzmq-23.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b815991c7d024bf461f358ad871f2be1135576274caed5749c4828859e40354e"}, - {file = "pyzmq-23.2.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:29b74774a0bfd3c4d98ac853f0bdca55bd9ec89d5b0def5486407cca54472ef8"}, - {file = "pyzmq-23.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4bb798bef181648827019001f6be43e1c48b34b477763b37a8d27d8c06d197b8"}, - {file = "pyzmq-23.2.1-cp36-cp36m-win32.whl", hash = "sha256:565bd5ab81f6964fc4067ccf2e00877ad0fa917308975694bbb54378389215f8"}, - {file = "pyzmq-23.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:1f368a82b29f80071781b20663c0fc0c8f6b13273f9f5abe1526af939534f90f"}, - {file = "pyzmq-23.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c9cfaf530e6a7ff65f0afe275e99f983f68b54dfb23ea401f0bc297a632766b6"}, - {file = "pyzmq-23.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c558b50402fca1acc94329c5d8f12aa429738904a5cfb32b9ed3c61235221bb"}, - {file = "pyzmq-23.2.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20bafc4095eab00f41a510579363a3f5e1f5c69d7ee10f1d88895c4df0259183"}, - {file = "pyzmq-23.2.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f619fd38fc2641abfb53cca719c165182500600b82c695cc548a0f05f764be05"}, - {file = "pyzmq-23.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:044447ae4b2016a6b8697571fd633f799f860b19b76c4a2fd9b1140d52ee6745"}, - {file = "pyzmq-23.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:49d30ba7074f469e8167917abf9eb854c6503ae10153034a6d4df33618f1db5f"}, - {file = "pyzmq-23.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:48400b96788cdaca647021bf19a9cd668384f46e4d9c55cf045bdd17f65299c8"}, - {file = "pyzmq-23.2.1-cp37-cp37m-win32.whl", hash = "sha256:8a68f57b7a3f7b6b52ada79876be1efb97c8c0952423436e84d70cc139f16f0d"}, - {file = "pyzmq-23.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9e5bf6e7239fc9687239de7a283aa8b801ab85371116045b33ae20132a1325d6"}, - {file = "pyzmq-23.2.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:0ff6294e001129a9f22dcbfba186165c7e6f573c46de2704d76f873c94c65416"}, - {file = "pyzmq-23.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:ffc6b1623d0f9affb351db4ca61f432dca3628a5ee015f9bf2bfbe9c6836881c"}, - {file = "pyzmq-23.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4d6f110c56f7d5b4d64dde3a382ae61b6d48174e30742859d8e971b18b6c9e5c"}, - {file = "pyzmq-23.2.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9269fbfe3a4eb2009199120861c4571ef1655fdf6951c3e7f233567c94e8c602"}, - {file = "pyzmq-23.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12e62ff0d5223ec09b597ab6d73858b9f64a51221399f3cb08aa495e1dff7935"}, - {file = "pyzmq-23.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fd5d0d50cbcf4bc376861529a907bed026a4cbe8c22a500ff8243231ef02433"}, - {file = "pyzmq-23.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9d0ab2936085c85a1fc6f9fd8f89d5235ae99b051e90ec5baa5e73ad44346e1f"}, - {file = "pyzmq-23.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:022cf5ea7bcaa8a06a03c2706e0ae66904b6138b2155577cd34c64bc7cc637ab"}, - {file = "pyzmq-23.2.1-cp38-cp38-win32.whl", hash = "sha256:28dbdb90b2f6b131f8f10e6081012e4e25234213433420e67e0c1162de537113"}, - {file = "pyzmq-23.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:10d1910ec381b851aeb024a042a13db178cb1edf125e76a4e9d2548ad103aadb"}, - {file = "pyzmq-23.2.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:99a5a77a10863493a1ee8dece02578c6b32025fb3afff91b40476bc489e81648"}, - {file = "pyzmq-23.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aecd6ceaccc4b594e0092d6513ef3f1c0fa678dd89f86bb8ff1a47014b8fca35"}, - {file = "pyzmq-23.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:415ff62ac525d9add1e3550430a09b9928d2d24a20cc4ce809e67caac41219ab"}, - {file = "pyzmq-23.2.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67975a9e1237b9ccc78f457bef17691bbdd2055a9d26e81ee914ba376846d0ce"}, - {file = "pyzmq-23.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e106b64bad744fe469dc3dd864f2764d66399178c1bf39d45294cc7980f14f"}, - {file = "pyzmq-23.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c842109d31a9281d678f668629241c405928afbebd913c48a5a8e7aee61f63d"}, - {file = "pyzmq-23.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fefdf9b685fda4141b95ebec975946076a5e0723ff70b037032b2085c5317684"}, - {file = "pyzmq-23.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:79a87831b47a9f6161ad23fa5e89d5469dc585abc49f90b9b07fea8905ae1234"}, - {file = "pyzmq-23.2.1-cp39-cp39-win32.whl", hash = "sha256:342ca3077f47ec2ee41b9825142b614e03e026347167cbc72a59b618c4f6106c"}, - {file = "pyzmq-23.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:5e05492be125dce279721d6b54fd1b956546ecc4bcdfcf8e7b4c413bc0874c10"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:07ed8aaf7ffe150af873269690cc654ffeca7491f62aae0f3821baa181f8d5fe"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ad28ddb40db8e450d7d4bf8a1d765d3f87b63b10e7e9a825a3c130c6371a8c03"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2f67b63f53c6994d601404fd1a329e6d940ac3dd1d92946a93b2b9c70df67b9f"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c890309296f53f9aa32ffcfc51d805705e1982bffd27c9692a8f1e1b8de279f4"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:624fd38071a817644acdae075b92a23ea0bdd126a58148288e8284d23ec361ce"}, - 
{file = "pyzmq-23.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a114992a193577cb62233abf8cb2832970f9975805a64740e325d2f895e7f85a"}, - {file = "pyzmq-23.2.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c780acddd2934c6831ff832ecbf78a45a7b62d4eb216480f863854a8b7d54fa7"}, - {file = "pyzmq-23.2.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d904f6595acfaaf99a1a61881fea068500c40374d263e5e073aa4005e5f9c28a"}, - {file = "pyzmq-23.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:929d548b74c0f82f7f95b54e4a43f9e4ce2523cfb8a54d3f7141e45652304b2a"}, - {file = "pyzmq-23.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f392cbea531b7142d1958c0d4a0c9c8d760dc451e5848d8dd3387804d3e3e62c"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0f09d85c45f58aa8e715b42f8b26beba68b3b63a8f7049113478aca26efbc30"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e708fbfdf4ee3107422b69ca65da1b9f056b431fc0888096a8c1d6cd908e8f"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35e635343ff367f697d00fa1484262bb68e36bc74c9b80737eac5a1e04c4e1b1"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efb9e38b2a590282704269585de7eb33bf43dc294cad092e1b172e23d4c217e5"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:407f909c4e8fde62fbdad9ebd448319792258cc0550c2815567a4d9d8d9e6d18"}, - {file = "pyzmq-23.2.1.tar.gz", hash = "sha256:2b381aa867ece7d0a82f30a0c7f3d4387b7cf2e0697e33efaa5bed6c5784abcd"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} -py = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "regex" -version = "2024.5.15" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, - {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, - {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, - {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, - {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = 
"regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, - {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, - {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, - {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, - {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "resampy" -version = "0.4.3" -description = "Efficient signal resampling" -optional = false -python-versions = "*" -files = [ - {file = "resampy-0.4.3-py3-none-any.whl", hash = "sha256:ad2ed64516b140a122d96704e32bc0f92b23f45419e8b8f478e5a05f83edcebd"}, - {file = "resampy-0.4.3.tar.gz", hash = "sha256:a0d1c28398f0e55994b739650afef4e3974115edbe96cd4bb81968425e916e47"}, -] - -[package.dependencies] -numba = ">=0.53" -numpy = ">=1.17" - -[package.extras] -design = ["optuna (>=2.10.0)"] -docs = ["numpydoc", "sphinx (!=1.3.1)"] -tests = ["pytest (<8)", "pytest-cov", "scipy (>=1.1)"] - -[[package]] -name = "scikit-image" -version = "0.20.0" -description = "Image processing in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "scikit_image-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3cec8c5e8412ee19642a916648144186eb6b60c39fb6608ab478b4d1a4575e25"}, - {file = "scikit_image-0.20.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0ab378822fadc93db7e917a266d489ea33df3b42edfef197caaebbabbc2e4ecc"}, - {file = "scikit_image-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6797e3ef5fc53897bde131cfc3ceba6ce247d89cfe194fc8d3aba7f5c12aaf6"}, - {file = "scikit_image-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f667dcf01737248bc5bd0a99fad58475abeb6b6a8229aecee9fdb96cf988ae85"}, - {file = "scikit_image-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:79a400ffe35fc7f64d1d043f3d043e062015689ad5637c35cd5569edae87ae13"}, - {file = "scikit_image-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:049d955869620453b9e0568c2da62c8fec47bf3714be48b5d46bbaebb91bdc1f"}, - {file = "scikit_image-0.20.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:a503ee85b444234ee88f34bf8674872dc37c6124ff60b7eb9242813de012ff4e"}, - {file = "scikit_image-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3943d7355d02b40c066fd87cd5fe1b4f6637a16448e62333c4191a65ebf40a1c"}, - {file = "scikit_image-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d719242ea7e7250d49e38d1e33c44c2dd59c3414ae085881d168b98cbb6059a"}, - {file = "scikit_image-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:fdd1fd258e78c86e382fd687177431088a40880bd785e0ab40ee5f3794366710"}, - {file = "scikit_image-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1cd0486cb769d906307a3ec3884630be822d8ec2f41069e197336f904f584a33"}, - {file = "scikit_image-0.20.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:2e9026161d0a698f532352dda6455a0bc13b1c9d831ea9279726b59d064df574"}, - {file = "scikit_image-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c123e6b0677dc1697c04b5bf2efb7110bcca511b4bc6967a38fa395ae5edf44"}, - {file = "scikit_image-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76f2fd12b537daea806a078df9ea76f5cc5a529d5bd7c41d7d0a101e9c5f91c4"}, - {file = 
"scikit_image-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:2118d610096754bca44b5d37328e1382e5fa7c6493803685100c9238e257d848"}, - {file = "scikit_image-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:13a5c1c81ee5bcb64ee8ca8f1a2cf371b0c4345ea6fb67c3052e1c6d5edbd936"}, - {file = "scikit_image-0.20.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:1794889d2dbb385c7ad5656363371ba0057b7a3335cda093a11415af84bb96e2"}, - {file = "scikit_image-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df14f8a55dae511749b081d9402ea215ea7c641bd6f74f06aa7b623e132817df"}, - {file = "scikit_image-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b856efc75e3051bea6d40a8ffcdaabd5682783ece1aa91c3f6777c3372a98ca1"}, - {file = "scikit_image-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:a600374394b76b7fc260cef54e1be21047c4de0ecffb0b7f2f7392cd8ba16ffa"}, - {file = "scikit_image-0.20.0.tar.gz", hash = "sha256:2cd784fce18bd31d71ade62c6221440199ead03acf7544086261ee032264cf61"}, -] - -[package.dependencies] -imageio = ">=2.4.1" -lazy_loader = ">=0.1" -networkx = ">=2.8" -numpy = ">=1.21.1" -packaging = ">=20.0" -pillow = ">=9.0.1" -PyWavelets = ">=1.1.1" -scipy = {version = ">=1.8", markers = "python_version > \"3.9\""} -tifffile = ">=2019.7.26" - -[package.extras] -build = ["Cython (>=0.29.24)", "build", "meson-python (>=0.13.0rc0)", "ninja", "numpy (>=1.21.1)", "packaging (>=20)", "pythran", "setuptools (>=67)", "wheel"] -data = ["pooch (>=1.3.0)"] -default = ["PyWavelets (>=1.1.1)", "imageio (>=2.4.1)", "lazy_loader (>=0.1)", "networkx (>=2.8)", "numpy (>=1.21.1)", "packaging (>=20.0)", "pillow (>=9.0.1)", "scipy (>=1.8)", "scipy (>=1.8,<1.9.2)", "tifffile (>=2019.7.26)"] -developer = ["pre-commit", "rtoml"] -docs = ["dask[array] (>=2022.9.2)", "ipywidgets", "kaleido", "matplotlib (>=3.6)", "myst-parser", "numpydoc (>=1.5)", "pandas (>=1.5)", "plotly (>=5.10)", "pooch (>=1.6)", "pytest-runner", "scikit-learn", "seaborn (>=0.11)", "sphinx (>=5.2)", "sphinx-copybutton", "sphinx-gallery (>=0.11)", "tifffile (>=2022.8.12)"] -optional = ["SimpleITK", "astropy (>=3.1.2)", "cloudpickle (>=0.2.1)", "dask[array] (>=1.0.0,!=2.17.0)", "matplotlib (>=3.3)", "pooch (>=1.3.0)", "pyamg"] -test = ["asv", "codecov", "matplotlib (>=3.3)", "pooch (>=1.3.0)", "pytest (>=5.2.0)", "pytest-cov (>=2.7.0)", "pytest-faulthandler", "pytest-localserver"] - -[[package]] -name = "scikit-learn" -version = "1.5.1" -description = "A set of python modules for machine learning and data mining" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scikit_learn-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:781586c414f8cc58e71da4f3d7af311e0505a683e112f2f62919e3019abd3745"}, - {file = "scikit_learn-1.5.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5b213bc29cc30a89a3130393b0e39c847a15d769d6e59539cd86b75d276b1a7"}, - {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff4ba34c2abff5ec59c803ed1d97d61b036f659a17f55be102679e88f926fac"}, - {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:161808750c267b77b4a9603cf9c93579c7a74ba8486b1336034c2f1579546d21"}, - {file = "scikit_learn-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:10e49170691514a94bb2e03787aa921b82dbc507a4ea1f20fd95557862c98dc1"}, - {file = "scikit_learn-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:154297ee43c0b83af12464adeab378dee2d0a700ccd03979e2b821e7dd7cc1c2"}, - {file 
= "scikit_learn-1.5.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b5e865e9bd59396220de49cb4a57b17016256637c61b4c5cc81aaf16bc123bbe"}, - {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909144d50f367a513cee6090873ae582dba019cb3fca063b38054fa42704c3a4"}, - {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b6f74b2c880276e365fe84fe4f1befd6a774f016339c65655eaff12e10cbf"}, - {file = "scikit_learn-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:9a07f90846313a7639af6a019d849ff72baadfa4c74c778821ae0fad07b7275b"}, - {file = "scikit_learn-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5944ce1faada31c55fb2ba20a5346b88e36811aab504ccafb9f0339e9f780395"}, - {file = "scikit_learn-1.5.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0828673c5b520e879f2af6a9e99eee0eefea69a2188be1ca68a6121b809055c1"}, - {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508907e5f81390e16d754e8815f7497e52139162fd69c4fdbd2dfa5d6cc88915"}, - {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97625f217c5c0c5d0505fa2af28ae424bd37949bb2f16ace3ff5f2f81fb4498b"}, - {file = "scikit_learn-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:da3f404e9e284d2b0a157e1b56b6566a34eb2798205cba35a211df3296ab7a74"}, - {file = "scikit_learn-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88e0672c7ac21eb149d409c74cc29f1d611d5158175846e7a9c2427bd12b3956"}, - {file = "scikit_learn-1.5.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:7b073a27797a283187a4ef4ee149959defc350b46cbf63a84d8514fe16b69855"}, - {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b59e3e62d2be870e5c74af4e793293753565c7383ae82943b83383fdcf5cc5c1"}, - {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd8d3a19d4bd6dc5a7d4f358c8c3a60934dc058f363c34c0ac1e9e12a31421d"}, - {file = "scikit_learn-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f57428de0c900a98389c4a433d4a3cf89de979b3aa24d1c1d251802aa15e44d"}, - {file = "scikit_learn-1.5.1.tar.gz", hash = "sha256:0ea5d40c0e3951df445721927448755d3fe1d80833b0b7308ebff5d2a45e6414"}, -] - -[package.dependencies] -joblib = ">=1.2.0" -numpy = ">=1.19.5" -scipy = ">=1.6.0" -threadpoolctl = ">=3.1.0" - -[package.extras] -benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] -build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] -examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] -install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] -maintenance = ["conda-lock (==2.5.6)"] -tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", 
"polars (>=0.20.23)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] - -[[package]] -name = "scipy" -version = "1.12.0" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scipy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78e4402e140879387187f7f25d91cc592b3501a2e51dfb320f48dfb73565f10b"}, - {file = "scipy-1.12.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5f00ebaf8de24d14b8449981a2842d404152774c1a1d880c901bf454cb8e2a1"}, - {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e53958531a7c695ff66c2e7bb7b79560ffdc562e2051644c5576c39ff8efb563"}, - {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e32847e08da8d895ce09d108a494d9eb78974cf6de23063f93306a3e419960c"}, - {file = "scipy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c1020cad92772bf44b8e4cdabc1df5d87376cb219742549ef69fc9fd86282dd"}, - {file = "scipy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:75ea2a144096b5e39402e2ff53a36fecfd3b960d786b7efd3c180e29c39e53f2"}, - {file = "scipy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:408c68423f9de16cb9e602528be4ce0d6312b05001f3de61fe9ec8b1263cad08"}, - {file = "scipy-1.12.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5adfad5dbf0163397beb4aca679187d24aec085343755fcdbdeb32b3679f254c"}, - {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3003652496f6e7c387b1cf63f4bb720951cfa18907e998ea551e6de51a04467"}, - {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8066bce124ee5531d12a74b617d9ac0ea59245246410e19bca549656d9a40a"}, - {file = "scipy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8bee4993817e204d761dba10dbab0774ba5a8612e57e81319ea04d84945375ba"}, - {file = "scipy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a24024d45ce9a675c1fb8494e8e5244efea1c7a09c60beb1eeb80373d0fecc70"}, - {file = "scipy-1.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7e76cc48638228212c747ada851ef355c2bb5e7f939e10952bc504c11f4e372"}, - {file = "scipy-1.12.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7ce148dffcd64ade37b2df9315541f9adad6efcaa86866ee7dd5db0c8f041c3"}, - {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c39f92041f490422924dfdb782527a4abddf4707616e07b021de33467f917bc"}, - {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ebda398f86e56178c2fa94cad15bf457a218a54a35c2a7b4490b9f9cb2676c"}, - {file = "scipy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:95e5c750d55cf518c398a8240571b0e0782c2d5a703250872f36eaf737751338"}, - {file = "scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c"}, - {file = "scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35"}, - {file = "scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067"}, - {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371"}, - {file = 
"scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490"}, - {file = "scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc"}, - {file = "scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e"}, - {file = "scipy-1.12.0.tar.gz", hash = "sha256:4bf5abab8a36d20193c698b0f1fc282c1d083c94723902c447e5d2f1780936a3"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<1.29.0" - -[package.extras] -dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] -doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "setuptools" -version = "70.3.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, - {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "sh" -version = "1.14.3" -description = "Python subprocess replacement" -optional = false -python-versions = "*" -files = [ - {file = "sh-1.14.3.tar.gz", hash = "sha256:e4045b6c732d9ce75d571c79f5ac2234edd9ae4f5fa9d59b09705082bdca18c7"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "soundfile" -version = "0.12.1" -description = "An audio library based on libsndfile, CFFI and NumPy" -optional = false -python-versions = "*" -files = [ - {file = "soundfile-0.12.1-py2.py3-none-any.whl", hash = "sha256:828a79c2e75abab5359f780c81dccd4953c45a2c4cd4f05ba3e233ddf984b882"}, - {file = "soundfile-0.12.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = 
"sha256:d922be1563ce17a69582a352a86f28ed8c9f6a8bc951df63476ffc310c064bfa"}, - {file = "soundfile-0.12.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:bceaab5c4febb11ea0554566784bcf4bc2e3977b53946dda2b12804b4fe524a8"}, - {file = "soundfile-0.12.1-py2.py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:2dc3685bed7187c072a46ab4ffddd38cef7de9ae5eb05c03df2ad569cf4dacbc"}, - {file = "soundfile-0.12.1-py2.py3-none-manylinux_2_31_x86_64.whl", hash = "sha256:074247b771a181859d2bc1f98b5ebf6d5153d2c397b86ee9e29ba602a8dfe2a6"}, - {file = "soundfile-0.12.1-py2.py3-none-win32.whl", hash = "sha256:59dfd88c79b48f441bbf6994142a19ab1de3b9bb7c12863402c2bc621e49091a"}, - {file = "soundfile-0.12.1-py2.py3-none-win_amd64.whl", hash = "sha256:0d86924c00b62552b650ddd28af426e3ff2d4dc2e9047dae5b3d8452e0a49a77"}, - {file = "soundfile-0.12.1.tar.gz", hash = "sha256:e8e1017b2cf1dda767aef19d2fd9ee5ebe07e050d430f77a0a7c66ba08b8cdae"}, -] - -[package.dependencies] -cffi = ">=1.0" - -[package.extras] -numpy = ["numpy"] - -[[package]] -name = "sympy" -version = "1.13.0" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sympy-1.13.0-py3-none-any.whl", hash = "sha256:6b0b32a4673fb91bd3cac3b55406c8e01d53ae22780be467301cc452f6680c92"}, - {file = "sympy-1.13.0.tar.gz", hash = "sha256:3b6af8f4d008b9a1a6a4268b335b984b23835f26d1d60b0526ebc71d48a25f57"}, -] - -[package.dependencies] -mpmath = ">=1.1.0,<1.4" - -[package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] - -[[package]] -name = "tbb" -version = "2021.13.0" -description = "Intel® oneAPI Threading Building Blocks (oneTBB)" -optional = false -python-versions = "*" -files = [ - {file = "tbb-2021.13.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:a2567725329639519d46d92a2634cf61e76601dac2f777a05686fea546c4fe4f"}, - {file = "tbb-2021.13.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:aaf667e92849adb012b8874d6393282afc318aca4407fc62f912ee30a22da46a"}, - {file = "tbb-2021.13.0-py3-none-win32.whl", hash = "sha256:6669d26703e9943f6164c6407bd4a237a45007e79b8d3832fe6999576eaaa9ef"}, - {file = "tbb-2021.13.0-py3-none-win_amd64.whl", hash = "sha256:3528a53e4bbe64b07a6112b4c5a00ff3c61924ee46c9c68e004a1ac7ad1f09c3"}, -] - -[[package]] -name = "threadpoolctl" -version = "3.5.0" -description = "threadpoolctl" -optional = false -python-versions = ">=3.8" -files = [ - {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, - {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, -] - -[[package]] -name = "tifffile" -version = "2024.7.2" -description = "Read and write TIFF files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "tifffile-2024.7.2-py3-none-any.whl", hash = "sha256:5a2ee608c9cc1f2e044d943dacebddc71d4827b6fad150ef4c644b7aefbe2d1a"}, - {file = "tifffile-2024.7.2.tar.gz", hash = "sha256:02e52e8872c0e9943add686d2fd8bcfb18f0a824760882cf5e35fcbc2c80e32c"}, -] - -[package.dependencies] -numpy = "*" - -[package.extras] -all = ["defusedxml", "fsspec", "imagecodecs (>=2023.8.12)", "lxml", "matplotlib", "zarr"] - -[[package]] -name = "tokenizers" -version = "0.13.3" -description = "Fast and Customizable Tokenizers" -optional = false -python-versions = "*" -files = [ - {file = "tokenizers-0.13.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:f3835c5be51de8c0a092058a4d4380cb9244fb34681fd0a295fbf0a52a5fdf33"}, - {file = 
"tokenizers-0.13.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4ef4c3e821730f2692489e926b184321e887f34fb8a6b80b8096b966ba663d07"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5fd1a6a25353e9aa762e2aae5a1e63883cad9f4e997c447ec39d071020459bc"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee0b1b311d65beab83d7a41c56a1e46ab732a9eed4460648e8eb0bd69fc2d059"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ef4215284df1277dadbcc5e17d4882bda19f770d02348e73523f7e7d8b8d396"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4d53976079cff8a033f778fb9adca2d9d69d009c02fa2d71a878b5f3963ed30"}, - {file = "tokenizers-0.13.3-cp310-cp310-win32.whl", hash = "sha256:1f0e3b4c2ea2cd13238ce43548959c118069db7579e5d40ec270ad77da5833ce"}, - {file = "tokenizers-0.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:89649c00d0d7211e8186f7a75dfa1db6996f65edce4b84821817eadcc2d3c79e"}, - {file = "tokenizers-0.13.3-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:56b726e0d2bbc9243872b0144515ba684af5b8d8cd112fb83ee1365e26ec74c8"}, - {file = "tokenizers-0.13.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc5c022ce692e1f499d745af293ab9ee6f5d92538ed2faf73f9708c89ee59ce6"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55c981ac44ba87c93e847c333e58c12abcbb377a0c2f2ef96e1a266e4184ff2"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f247eae99800ef821a91f47c5280e9e9afaeed9980fc444208d5aa6ba69ff148"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b3e3215d048e94f40f1c95802e45dcc37c5b05eb46280fc2ccc8cd351bff839"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba2b0bf01777c9b9bc94b53764d6684554ce98551fec496f71bc5be3a03e98b"}, - {file = "tokenizers-0.13.3-cp311-cp311-win32.whl", hash = "sha256:cc78d77f597d1c458bf0ea7c2a64b6aa06941c7a99cb135b5969b0278824d808"}, - {file = "tokenizers-0.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:ecf182bf59bd541a8876deccf0360f5ae60496fd50b58510048020751cf1724c"}, - {file = "tokenizers-0.13.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:0527dc5436a1f6bf2c0327da3145687d3bcfbeab91fed8458920093de3901b44"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cbb2c307627dc99b44b22ef05ff4473aa7c7cc1fec8f0a8b37d8a64b1a16d2"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4560dbdeaae5b7ee0d4e493027e3de6d53c991b5002d7ff95083c99e11dd5ac0"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64064bd0322405c9374305ab9b4c07152a1474370327499911937fd4a76d004b"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c6e2ab0f2e3d939ca66aa1d596602105fe33b505cd2854a4c1717f704c51de"}, - {file = "tokenizers-0.13.3-cp37-cp37m-win32.whl", hash = "sha256:6cc29d410768f960db8677221e497226e545eaaea01aa3613fa0fdf2cc96cff4"}, - {file = "tokenizers-0.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fc2a7fdf864554a0dacf09d32e17c0caa9afe72baf9dd7ddedc61973bae352d8"}, - {file = 
"tokenizers-0.13.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8791dedba834c1fc55e5f1521be325ea3dafb381964be20684b92fdac95d79b7"}, - {file = "tokenizers-0.13.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:d607a6a13718aeb20507bdf2b96162ead5145bbbfa26788d6b833f98b31b26e1"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3791338f809cd1bf8e4fee6b540b36822434d0c6c6bc47162448deee3f77d425"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2f35f30e39e6aab8716f07790f646bdc6e4a853816cc49a95ef2a9016bf9ce6"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310204dfed5aa797128b65d63538a9837cbdd15da2a29a77d67eefa489edda26"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0f9b92ea052305166559f38498b3b0cae159caea712646648aaa272f7160963"}, - {file = "tokenizers-0.13.3-cp38-cp38-win32.whl", hash = "sha256:9a3fa134896c3c1f0da6e762d15141fbff30d094067c8f1157b9fdca593b5806"}, - {file = "tokenizers-0.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:8e7b0cdeace87fa9e760e6a605e0ae8fc14b7d72e9fc19c578116f7287bb873d"}, - {file = "tokenizers-0.13.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:00cee1e0859d55507e693a48fa4aef07060c4bb6bd93d80120e18fea9371c66d"}, - {file = "tokenizers-0.13.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a23ff602d0797cea1d0506ce69b27523b07e70f6dda982ab8cf82402de839088"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ce07445050b537d2696022dafb115307abdffd2a5c106f029490f84501ef97"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:280ffe95f50eaaf655b3a1dc7ff1d9cf4777029dbbc3e63a74e65a056594abc3"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97acfcec592f7e9de8cadcdcda50a7134423ac8455c0166b28c9ff04d227b371"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7730c98a3010cd4f523465867ff95cd9d6430db46676ce79358f65ae39797b"}, - {file = "tokenizers-0.13.3-cp39-cp39-win32.whl", hash = "sha256:48625a108029cb1ddf42e17a81b5a3230ba6888a70c9dc14e81bc319e812652d"}, - {file = "tokenizers-0.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:bc0a6f1ba036e482db6453571c9e3e60ecd5489980ffd95d11dc9f960483d783"}, - {file = "tokenizers-0.13.3.tar.gz", hash = "sha256:2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e"}, -] - -[package.extras] -dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] - -[[package]] -name = "torch" -version = "2.3.1" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "torch-2.3.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:605a25b23944be5ab7c3467e843580e1d888b8066e5aaf17ff7bf9cc30001cc3"}, - {file = "torch-2.3.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f2357eb0965583a0954d6f9ad005bba0091f956aef879822274b1bcdb11bd308"}, - {file = "torch-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:32b05fe0d1ada7f69c9f86c14ff69b0ef1957a5a54199bacba63d22d8fab720b"}, - {file = "torch-2.3.1-cp310-none-macosx_11_0_arm64.whl", hash = 
"sha256:7c09a94362778428484bcf995f6004b04952106aee0ef45ff0b4bab484f5498d"}, - {file = "torch-2.3.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:b2ec81b61bb094ea4a9dee1cd3f7b76a44555375719ad29f05c0ca8ef596ad39"}, - {file = "torch-2.3.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:490cc3d917d1fe0bd027057dfe9941dc1d6d8e3cae76140f5dd9a7e5bc7130ab"}, - {file = "torch-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:5802530783bd465fe66c2df99123c9a54be06da118fbd785a25ab0a88123758a"}, - {file = "torch-2.3.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:a7dd4ed388ad1f3d502bf09453d5fe596c7b121de7e0cfaca1e2017782e9bbac"}, - {file = "torch-2.3.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:a486c0b1976a118805fc7c9641d02df7afbb0c21e6b555d3bb985c9f9601b61a"}, - {file = "torch-2.3.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:224259821fe3e4c6f7edf1528e4fe4ac779c77addaa74215eb0b63a5c474d66c"}, - {file = "torch-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:e5fdccbf6f1334b2203a61a0e03821d5845f1421defe311dabeae2fc8fbeac2d"}, - {file = "torch-2.3.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:3c333dc2ebc189561514eda06e81df22bf8fb64e2384746b2cb9f04f96d1d4c8"}, - {file = "torch-2.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:07e9ba746832b8d069cacb45f312cadd8ad02b81ea527ec9766c0e7404bb3feb"}, - {file = "torch-2.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:462d1c07dbf6bb5d9d2f3316fee73a24f3d12cd8dacf681ad46ef6418f7f6626"}, - {file = "torch-2.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff60bf7ce3de1d43ad3f6969983f321a31f0a45df3690921720bcad6a8596cc4"}, - {file = "torch-2.3.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:bee0bd33dc58aa8fc8a7527876e9b9a0e812ad08122054a5bff2ce5abf005b10"}, - {file = "torch-2.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:aaa872abde9a3d4f91580f6396d54888620f4a0b92e3976a6034759df4b961ad"}, - {file = "torch-2.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3d7a7f7ef21a7520510553dc3938b0c57c116a7daee20736a9e25cbc0e832bdc"}, - {file = "torch-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:4777f6cefa0c2b5fa87223c213e7b6f417cf254a45e5829be4ccd1b2a4ee1011"}, - {file = "torch-2.3.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:2bb5af780c55be68fe100feb0528d2edebace1d55cb2e351de735809ba7391eb"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -mkl = {version = ">=2021.1.1,<=2021.4.0", markers = "platform_system == \"Windows\""} -networkx = "*" -nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "8.9.2.26", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = 
"platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -sympy = "*" -triton = {version = "2.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""} -typing-extensions = ">=4.8.0" - -[package.extras] -opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.9.1)"] - -[[package]] -name = "torchvision" -version = "0.18.1" -description = "image and video datasets and models for torch deep learning" -optional = false -python-versions = ">=3.8" -files = [ - {file = "torchvision-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e694e54b0548dad99c12af6bf0c8e4f3350137d391dcd19af22a1c5f89322b3"}, - {file = "torchvision-0.18.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:0b3bda0aa5b416eeb547143b8eeaf17720bdba9cf516dc991aacb81811aa96a5"}, - {file = "torchvision-0.18.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:573ff523c739405edb085f65cb592f482d28a30e29b0be4c4ba08040b3ae785f"}, - {file = "torchvision-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:ef7bbbc60b38e831a75e547c66ca1784f2ac27100f9e4ddbe9614cef6cbcd942"}, - {file = "torchvision-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80b5d794dd0fdba787adc22f1a367a5ead452327686473cb260dd94364bc56a6"}, - {file = "torchvision-0.18.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:9077cf590cdb3a5e8fdf5cdb71797f8c67713f974cf0228ecb17fcd670ab42f9"}, - {file = "torchvision-0.18.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ceb993a882f1ae7ae373ed39c28d7e3e802205b0e59a7ed84ef4028f0bba8d7f"}, - {file = "torchvision-0.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:52f7436140045dc2239cdc502aa76b2bd8bd676d64244ff154d304aa69852046"}, - {file = "torchvision-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2be6f0bf7c455c89a51a1dbb6f668d36c6edc479f49ac912d745d10df5715657"}, - {file = "torchvision-0.18.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:f118d887bfde3a948a41d56587525401e5cac1b7db2eaca203324d6ed2b1caca"}, - {file = "torchvision-0.18.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:13d24d904f65e62d66a1e0c41faec630bc193867b8a4a01166769e8a8e8df8e9"}, - {file = "torchvision-0.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:ed6340b69a63a625e512a66127210d412551d9c5f2ad2978130c6a45bf56cd4a"}, - {file = "torchvision-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b1c3864fa9378c88bce8ad0ef3599f4f25397897ce612e1c245c74b97092f35e"}, - {file = "torchvision-0.18.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:02085a2ffc7461f5c0edb07d6f3455ee1806561f37736b903da820067eea58c7"}, - {file = "torchvision-0.18.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:9726c316a2501df8503e5a5dc46a631afd4c515a958972e5b7f7b9c87d2125c0"}, - {file = "torchvision-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:64a2662dbf30db9055d8b201d6e56f312a504e5ccd9d144c57c41622d3c524cb"}, - {file = "torchvision-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:975b8594c0f5288875408acbb74946eea786c5b008d129c0d045d0ead23742bc"}, - {file = "torchvision-0.18.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:da83c8bbd34d8bee48bfa1d1b40e0844bc3cba10ed825a5a8cbe3ce7b62264cd"}, - {file = "torchvision-0.18.1-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:54bfcd352abb396d5c9c237d200167c178bd136051b138e1e8ef46ce367c2773"}, - {file = "torchvision-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:5c8366a1aeee49e9ea9e64b30d199debdf06b1bd7610a76165eb5d7869c3bde5"}, -] - -[package.dependencies] -numpy = "*" -pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" -torch = "2.3.1" - -[package.extras] -scipy = ["scipy"] - -[[package]] -name = "tqdm" -version = "4.64.0" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -files = [ - {file = "tqdm-4.64.0-py2.py3-none-any.whl", hash = "sha256:74a2cdefe14d11442cedf3ba4e21a3b84ff9a2dbdc6cfae2c34addb2a14a5ea6"}, - {file = "tqdm-4.64.0.tar.gz", hash = "sha256:40be55d30e200777a307a7585aee69e4eabb46b4ec6a4b4a5f2d9f11e7d5408d"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "transformers" -version = "4.29.2" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "transformers-4.29.2-py3-none-any.whl", hash = "sha256:0ef158b99bad6f4e6652a0d8655fbbe58b4cb788ce7040f320b5d29c7c810a75"}, - {file = "transformers-4.29.2.tar.gz", hash = "sha256:ed9467661f459f1ce49461d83f18f3b36b6a37f306182dc2ba272935f3b93ebb"}, -] - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.14.1,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.19.0)"] -agents = ["Pillow", "accelerate (>=0.19.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"] -all = ["Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.19.0)", "deepspeed (>=0.8.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.19.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.8.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate 
(>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.6.9)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "numba (<0.57.0)", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.19.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "numba (<0.57.0)", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", 
"tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] -docs-specific = ["hf-doc-builder"] -fairscale = ["fairscale (>0.3)"] -flax = ["flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8,<=0.1.4)"] -flax-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune]", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (>=0.0.241,<=0.0.259)", "urllib3 (<2.0.0)"] -ray = ["ray[tune]"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm"] -tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["accelerate (>=0.19.0)", "torch (>=1.9,!=1.12.0)"] -torch-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.14.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow"] - -[[package]] -name = "triton" -version = "2.3.1" -description = "A language and compiler for custom Deep Learning operations" -optional = false -python-versions = "*" -files = [ - {file = "triton-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c84595cbe5e546b1b290d2a58b1494df5a2ef066dd890655e5b8a8a92205c33"}, - {file = "triton-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d64ae33bcb3a7a18081e3a746e8cf87ca8623ca13d2c362413ce7a486f893e"}, - {file = "triton-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eaf80e8761a9e3498aa92e7bf83a085b31959c61f5e8ac14eedd018df6fccd10"}, - {file = "triton-2.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b13bf35a2b659af7159bf78e92798dc62d877aa991de723937329e2d382f1991"}, - {file = "triton-2.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63381e35ded3304704ea867ffde3b7cfc42c16a55b3062d41e017ef510433d66"}, - {file = "triton-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d968264523c7a07911c8fb51b4e0d1b920204dae71491b1fe7b01b62a31e124"}, -] - -[package.dependencies] -filelock = "*" - -[package.extras] -build = ["cmake (>=3.20)", "lit"] -tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"] -tutorials = ["matplotlib", "pandas", "tabulate", "torch"] - -[[package]] -name = "tritonclient" -version = "2.41.0" -description = "Python client library and utilities for communicating with Triton Inference Server" -optional = false -python-versions = "*" -files = [ - {file = "tritonclient-2.41.0-py3-none-any.whl", hash = "sha256:ee543c3436f1a068f4f4a243a729e27006b07ea6477b38ecd8f62d167835e092"}, - {file = "tritonclient-2.41.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:cf4ed5df836fa99937678a9fd852f86ba05c7b83656dd0a540e20afdb9ab4629"}, - {file = "tritonclient-2.41.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:2314843f6a3c50a8e20166e56cc47af9ac41a3c100e919eddbbefa5ba09bd1b9"}, -] - -[package.dependencies] -aiohttp = {version = ">=3.8.1,<4.0.0", optional = true, markers = "extra == \"all\""} -cuda-python = {version = "*", optional = true, markers = "extra == \"all\""} -geventhttpclient = {version = ">=1.4.4,<=2.0.2", optional = true, markers = "extra == \"all\""} -grpcio = {version = ">=1.41.0", optional = true, markers = "extra == \"all\""} -numpy = ">=1.19.1" -packaging = {version = ">=14.1", optional = true, markers = "extra == \"all\""} -python-rapidjson = ">=0.9.1" - -[package.extras] -all = ["aiohttp (>=3.8.1,<4.0.0)", "cuda-python", "geventhttpclient (>=1.4.4,<=2.0.2)", "grpcio (>=1.41.0)", "numpy (>=1.19.1)", "packaging (>=14.1)", "python-rapidjson (>=0.9.1)"] -cuda = ["cuda-python"] -grpc = ["grpcio (>=1.41.0)", "numpy (>=1.19.1)", "packaging (>=14.1)", "python-rapidjson (>=0.9.1)"] -http = ["aiohttp (>=3.8.1,<4.0.0)", "geventhttpclient (>=1.4.4,<=2.0.2)", "numpy (>=1.19.1)", "python-rapidjson (>=0.9.1)"] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "typing-inspect" -version = "0.6.0" -description = "Runtime inspection utilities for typing module." 
-optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.6.0-py2-none-any.whl", hash = "sha256:de08f50a22955ddec353876df7b2545994d6df08a2f45d54ac8c05e530372ca0"}, - {file = "typing_inspect-0.6.0-py3-none-any.whl", hash = "sha256:3b98390df4d999a28cf5b35d8b333425af5da2ece8a4ea9e98f71e7591347b4f"}, - {file = "typing_inspect-0.6.0.tar.gz", hash = "sha256:8f1b1dd25908dbfd81d3bebc218011531e7ab614ba6e5bf7826d887c834afab7"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - 
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "zope-event" -version = "5.0" -description = "Very basic event publishing system" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, - {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, -] - -[package.dependencies] -setuptools = "*" - -[package.extras] -docs = ["Sphinx"] -test = ["zope.testrunner"] - -[[package]] -name = "zope-interface" -version = "6.4.post2" -description = "Interfaces for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zope.interface-6.4.post2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2eccd5bef45883802848f821d940367c1d0ad588de71e5cabe3813175444202c"}, - {file = "zope.interface-6.4.post2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:762e616199f6319bb98e7f4f27d254c84c5fb1c25c908c2a9d0f92b92fb27530"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef8356f16b1a83609f7a992a6e33d792bb5eff2370712c9eaae0d02e1924341"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e4fa5d34d7973e6b0efa46fe4405090f3b406f64b6290facbb19dcbf642ad6b"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d22fce0b0f5715cdac082e35a9e735a1752dc8585f005d045abb1a7c20e197f9"}, - {file = "zope.interface-6.4.post2-cp310-cp310-win_amd64.whl", hash = "sha256:97e615eab34bd8477c3f34197a17ce08c648d38467489359cb9eb7394f1083f7"}, - {file = "zope.interface-6.4.post2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:599f3b07bde2627e163ce484d5497a54a0a8437779362395c6b25e68c6590ede"}, - {file = "zope.interface-6.4.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:136cacdde1a2c5e5bc3d0b2a1beed733f97e2dad8c2ad3c2e17116f6590a3827"}, - {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47937cf2e7ed4e0e37f7851c76edeb8543ec9b0eae149b36ecd26176ff1ca874"}, - {file = 
"zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f0a6be264afb094975b5ef55c911379d6989caa87c4e558814ec4f5125cfa2e"}, - {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47654177e675bafdf4e4738ce58cdc5c6d6ee2157ac0a78a3fa460942b9d64a8"}, - {file = "zope.interface-6.4.post2-cp311-cp311-win_amd64.whl", hash = "sha256:e2fb8e8158306567a3a9a41670c1ff99d0567d7fc96fa93b7abf8b519a46b250"}, - {file = "zope.interface-6.4.post2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b912750b13d76af8aac45ddf4679535def304b2a48a07989ec736508d0bbfbde"}, - {file = "zope.interface-6.4.post2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ac46298e0143d91e4644a27a769d1388d5d89e82ee0cf37bf2b0b001b9712a4"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86a94af4a88110ed4bb8961f5ac72edf782958e665d5bfceaab6bf388420a78b"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73f9752cf3596771c7726f7eea5b9e634ad47c6d863043589a1c3bb31325c7eb"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b5c3e9744dcdc9e84c24ed6646d5cf0cf66551347b310b3ffd70f056535854"}, - {file = "zope.interface-6.4.post2-cp312-cp312-win_amd64.whl", hash = "sha256:551db2fe892fcbefb38f6f81ffa62de11090c8119fd4e66a60f3adff70751ec7"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96ac6b3169940a8cd57b4f2b8edcad8f5213b60efcd197d59fbe52f0accd66e"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cebff2fe5dc82cb22122e4e1225e00a4a506b1a16fafa911142ee124febf2c9e"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ee982237cffaf946db365c3a6ebaa37855d8e3ca5800f6f48890209c1cfefc"}, - {file = "zope.interface-6.4.post2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:fbf649bc77510ef2521cf797700b96167bb77838c40780da7ea3edd8b78044d1"}, - {file = "zope.interface-6.4.post2-cp37-cp37m-win_amd64.whl", hash = "sha256:4c0b208a5d6c81434bdfa0f06d9b667e5de15af84d8cae5723c3a33ba6611b82"}, - {file = "zope.interface-6.4.post2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3fe667935e9562407c2511570dca14604a654988a13d8725667e95161d92e9b"}, - {file = "zope.interface-6.4.post2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a96e6d4074db29b152222c34d7eec2e2db2f92638d2b2b2c704f9e8db3ae0edc"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:866a0f583be79f0def667a5d2c60b7b4cc68f0c0a470f227e1122691b443c934"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe919027f29b12f7a2562ba0daf3e045cb388f844e022552a5674fcdf5d21f1"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e0343a6e06d94f6b6ac52fbc75269b41dd3c57066541a6c76517f69fe67cb43"}, - {file = "zope.interface-6.4.post2-cp38-cp38-win_amd64.whl", hash = 
"sha256:dabb70a6e3d9c22df50e08dc55b14ca2a99da95a2d941954255ac76fd6982bc5"}, - {file = "zope.interface-6.4.post2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:706efc19f9679a1b425d6fa2b4bc770d976d0984335eaea0869bd32f627591d2"}, - {file = "zope.interface-6.4.post2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d136e5b8821073e1a09dde3eb076ea9988e7010c54ffe4d39701adf0c303438"}, - {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1730c93a38b5a18d24549bc81613223962a19d457cfda9bdc66e542f475a36f4"}, - {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc2676312cc3468a25aac001ec727168994ea3b69b48914944a44c6a0b251e79"}, - {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a62fd6cd518693568e23e02f41816adedfca637f26716837681c90b36af3671"}, - {file = "zope.interface-6.4.post2-cp39-cp39-win_amd64.whl", hash = "sha256:d3f7e001328bd6466b3414215f66dde3c7c13d8025a9c160a75d7b2687090d15"}, - {file = "zope.interface-6.4.post2.tar.gz", hash = "sha256:1c207e6f6dfd5749a26f5a5fd966602d6b824ec00d2df84a7e9a924e8933654e"}, -] - -[package.dependencies] -setuptools = "*" - -[package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] -test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] -testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.10" -content-hash = "36b0d8bae6f499957d37c37266649a6810ddb42ba225349cdc25a839317d1e9d" diff --git a/stf/stf-api-alternative/.ipynb_checkpoints/pyproject-checkpoint.toml b/stf/stf-api-alternative/.ipynb_checkpoints/pyproject-checkpoint.toml deleted file mode 100644 index 741b27a48a985abdfa82578b7476f9756ad5bf33..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/.ipynb_checkpoints/pyproject-checkpoint.toml +++ /dev/null @@ -1,35 +0,0 @@ -[tool.poetry] -name = "stf-alternative" -version = "0.1.0" -description = "alternative version of stf-api" -authors = ["Kim Minjong "] -readme = "README.md" -packages = [ - {include = "stf_alternative", from="src"} -] - -[tool.poetry.dependencies] -python = "^3.10" -librosa = "0.8.1" -imageio = "2.13.5" -imageio-ffmpeg = "0.4.5" -Pillow = "9.1.0" -tqdm = "4.64.0" -numpy = "1.22.4" -addict = "2.4.0" -scipy = "1.12.0" -pandas = "1.3.5" -face_alignment = "1.3.5" -moviepy = "1.0.3" -transformers = "4.29.2" -facenet_pytorch = "2.5.2" -ffmpeg-python = "^0.2" -pydub = "^0.25" -av = "^11.0.0" -nvidia-pytriton = {extras = ["client"], version = "^0.4.2"} -asyncstdlib = "^3.10.9" - - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/stf/stf-api-alternative/README.md b/stf/stf-api-alternative/README.md deleted file mode 100644 index 5a16c5d0a638b6899d6de799e0a02ce0da0351c8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/README.md +++ /dev/null @@ -1 +0,0 @@ -stf_api와 동일한 기능을 수행하는 라이브러리 diff --git a/stf/stf-api-alternative/poetry.lock b/stf/stf-api-alternative/poetry.lock deleted file mode 100644 index 70d3bdeb4280d17a172a656ff6fbc1ce9680669c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/poetry.lock +++ /dev/null @@ -1,3251 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
- -[[package]] -name = "addict" -version = "2.4.0" -description = "Addict is a dictionary whose items can be set using both attribute and item syntax." -optional = false -python-versions = "*" -files = [ - {file = "addict-2.4.0-py3-none-any.whl", hash = "sha256:249bb56bbfd3cdc2a004ea0ff4c2b6ddc84d53bc2194761636eb314d5cfa5dfc"}, - {file = "addict-2.4.0.tar.gz", hash = "sha256:b3b2210e0e067a281f5646c8c5db92e99b7231ea8b0eb5f74dbdf9e259d4e494"}, -] - -[[package]] -name = "aiohttp" -version = "3.9.5" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file 
= "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - 
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "asyncstdlib" -version = "3.12.4" -description = "The missing async toolbox" -optional = false -python-versions = "~=3.8" -files = [ - {file = "asyncstdlib-3.12.4-py3-none-any.whl", hash = "sha256:8e269c30906658faca35936d0348c1057aff4df1ee125f6ce564feeb72212d5e"}, - {file = "asyncstdlib-3.12.4.tar.gz", hash = "sha256:c87e2e2ebfea47d24af728e1caab2a4fb705228508679f30e34afdcbd0097a05"}, -] - -[package.extras] -doc = ["sphinx", "sphinxcontrib-trio"] -test = ["black", "coverage", "flake8", "flake8-2020", "flake8-bugbear", "mypy", "pytest", "pytest-cov"] -typetest = ["mypy", "pyright", "typing-extensions"] - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes 
Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - -[[package]] -name = "audioread" -version = "3.0.1" -description = "Multi-library, cross-platform audio decoding." -optional = false -python-versions = ">=3.6" -files = [ - {file = "audioread-3.0.1-py3-none-any.whl", hash = "sha256:4cdce70b8adc0da0a3c9e0d85fb10b3ace30fbdf8d1670fd443929b61d117c33"}, - {file = "audioread-3.0.1.tar.gz", hash = "sha256:ac5460a5498c48bdf2e8e767402583a4dcd13f4414d286f42ce4379e8b35066d"}, -] - -[package.extras] -test = ["tox"] - -[[package]] -name = "av" -version = "11.0.0" -description = "Pythonic bindings for FFmpeg's libraries." -optional = false -python-versions = ">=3.8" -files = [ - {file = "av-11.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a01f13b37eb6d181e03bbbbda29093fe2d68f10755795188220acdc89560ec27"}, - {file = "av-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b2236faee1b5d71dff3cdef81ef6eec22cc8b71dbfb45eb037e6437fe80f24e7"}, - {file = "av-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40543a08e5c84aecd2bc84da5d43548743201897f0ba21bf5ae3a4dcddefca2b"}, - {file = "av-11.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2907376884d956376aaf3bc1905fa4e0dcb9ba4e0d183e519392a19d89317d1b"}, - {file = "av-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8d5581dcdc81cd601e3ce036809f14da82c46ff187bcefe981ec819390e0ab0"}, - {file = "av-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:150490f2a62cfa470f3cb60f3a0060ff93afd807e2b7b3b0eeeb5a992eb8d67b"}, - {file = "av-11.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d9bac0de62f09e2cb4e2132b5a46a89bc31c898189aa285b484c17351d991afe"}, - {file = "av-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2122ff8bdace4ce50207920f37de472517921e2ca1f0503464f748fdb8e20506"}, - {file = "av-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:527d840697fee6ad4cf47eba987eaf30cd76bd96b2d20eaa907e166b9b8065c8"}, - {file = "av-11.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abeaedddfca9101886eb6fc47318c5f5ece8480d330d73aacf6917d7421981a2"}, - {file = "av-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13790fbb889b955baf885fe3761e923e85537ef414173465ec293177cedb7b99"}, - {file = "av-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fc27e27f52480287f44226ad4ae3eb53346bf027959d0f00a9154530bd98b371"}, - {file = "av-11.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:892583e2c6b8c2500e5d24310f499caefcdaa2e48c8f7169ad41041aaaf4da11"}, - {file = "av-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6943679d70a9f4de974049e7ae2cf0b20afe0d7ddab650526c02a6cf9adcd08f"}, - {file = 
"av-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6d73b038ccf1df5c16bc643eee5c694fb7732e09375e2f4903c1f4ce90dfb72"}, - {file = "av-11.0.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c83422db3333e97b9680700df5185139352fc3a568b14179da3bdcbeb2f0e91b"}, - {file = "av-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8413900f6a3639e0088c018a3a516a1656d4d16799e7aa759a16ddf3bd268e2b"}, - {file = "av-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:908e49ee336223801d8f2f7dca5a1deb64e9d8256138b8e7a79013b682a6ebb5"}, - {file = "av-11.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:82411ae4a562da07b76028d2f349fb0e6a86aa78ad2b18d2d7bf5b06b17fba14"}, - {file = "av-11.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:621104bd63e38fa4eca554da3722b1aac329619de39152f27eec8999acc72342"}, - {file = "av-11.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:442878990c094455a16c10127edcc54bc4e78d355e6a13ad2a27608b0ecda38f"}, - {file = "av-11.0.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658199c92987dc72511f5ee8ade62faef6234b7a04c8b5788de99e366be5e073"}, - {file = "av-11.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4b381665c49267b46f87297573898b85e5c41384750fee2e70267fbc4ba318"}, - {file = "av-11.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:60de14f71293e36ca4e297cc8a8460f0cf74f38a201694f3c6fc7f40301582f2"}, - {file = "av-11.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a90f04af96374dab94028a7471597bdfcf03083338b9be2eb8ca4805a8ec7ab5"}, - {file = "av-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8821ab2d23e4cb5c8abea6b08d2b1bfceca6af2d88fab1d1dc1b3ec7b34933c7"}, - {file = "av-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a92342ed307eeaf9509a6b0f3bafd4337c4880c851b50acc18df48c625b63b6"}, - {file = "av-11.0.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe3502975bc844f5d432c1f24d331bf6ef3e05532ebf06f7ed08b60719b8ea5"}, - {file = "av-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c278b3a4fd111b4c9190abe6b1a5ca358d5f91e851d470b62577b957e0187b09"}, - {file = "av-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:478aa1d54fbc3058ea65ff41086b6adbe1326b456a027d2f3b59dbe60b4ac2ca"}, - {file = "av-11.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e8df10bb2d56a981d02a8a0b41491912b76dad06305d174a2575ef55ad451100"}, - {file = "av-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30c51e597785a89241bd61865faff2dbd3327856a8285a1e120dbf60e18348b"}, - {file = "av-11.0.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8b8bd92edb096699b306e7b090ad096925ca3bdae6f89656f023fa2a2da627d"}, - {file = "av-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9383af733abfc44f6fc29307a6c922fbf671ee343dc97b78b74eac6a2346a46d"}, - {file = "av-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a9df4a60579198b560f641cdfe4c2139948a70193ddc096b275f2cf6d94e3e04"}, - {file = "av-11.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8ae5f7ae0a7093fb813686d4aa4c554531f80a28480427f5c155da51b747eff0"}, - {file = "av-11.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50fb7d606f8236891d773c701d5650b93af8dbf78eeaac36fc7e1f7f64a9d664"}, - {file = 
"av-11.0.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:543e0f9bf6ff02dedbe66d906fbc89c8907c80a8ea7413fc3fed68ce4a6e9b44"}, - {file = "av-11.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daa279c884457ab194ce78bdd89c0aa391af733da95fb3258d4c6eb8c258299a"}, - {file = "av-11.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1aacc21f4cf96447117a61edfb776afb73186750a5e08a21484ddfc3599aefb5"}, - {file = "av-11.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2568b38eef777b916a5d02e42b8f67f92e12023531239ddd32e1ca4f3cdf8c5b"}, - {file = "av-11.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:747c6d347e27c59cc2e78c9c505d23cd88eceff0cc9386be73693ae9009a577c"}, - {file = "av-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd8f4941b9d3450eff40003b9b9d904667aec7ab085fa31f0f9bca32d755e0"}, - {file = "av-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f39c1244ba0cf185b2722aeec116b8a98a2ee5728ce687cec0bda60ee0360dfc"}, - {file = "av-11.0.0.tar.gz", hash = "sha256:48223f000a252070f8e700ff634bb7fb3aa1b7bc7e450373029fbdd6f369ac31"}, -] - -[[package]] -name = "brotli" -version = "1.1.0" -description = "Python bindings for the Brotli compression library" -optional = false -python-versions = "*" -files = [ - {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, - {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, - {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"}, - {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"}, - {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"}, - {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, - {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, - {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, - {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, - {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"}, - {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"}, - {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"}, - {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"}, - {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, - {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, - {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, - {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, - {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"}, - {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"}, - {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, - {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, - {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, - {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, - {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, - {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, - {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"}, - {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, - {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, - {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, - {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, - {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"}, - {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"}, - {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"}, - {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, - {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, - {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, - {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, - {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"}, - {file = 
"Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"}, - {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"}, - {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"}, - {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, - {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, - {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, - {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, - {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"}, - {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"}, - {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"}, - {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"}, - {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, - {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, - {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, - {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, -] - -[[package]] -name = "certifi" -version = "2024.7.4" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = 
"cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - 
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "cuda-python" -version = "12.5.0" -description = "Python bindings for CUDA" -optional = false -python-versions = "*" -files = [ - {file = "cuda_python-12.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bbfacbbcfcae149dbc937aa614d362608f20bba973882002fee9125ced32604"}, - {file = "cuda_python-12.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087acc19ac4b467d71cfb7a39306038993176a7a1459426da50afa0fe68c697"}, - {file = "cuda_python-12.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:6a462a2d5d2658041a317c113023ce97a609f98c25272b5b1d13e274eecbb941"}, - {file = "cuda_python-12.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:560453304aee0a3bc28a2626e8e3fd1f94fcea58912b4a9156e6ef2a0121f2ba"}, - {file = "cuda_python-12.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c2f3fc9b8149ffcb4016d8e5ed4ae1d728a55fa02de9b3d2c2c14cda7e4ee8e"}, - {file = "cuda_python-12.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:62d692bcf4fb71224290610df52ce33760e0b08a8a1bf0652003712d70caf063"}, - {file = "cuda_python-12.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f76b2c8b7c97174d62f965ad8a518d10b27cd724ad06bb9f0f59ba6f80b492f"}, - {file = "cuda_python-12.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e7c1d70bf1d6696dfd4a72dfd129cd963b377cfb7ffa9cc6e888101c2b04df4"}, - {file = "cuda_python-12.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:c58a4a86fd4b71cc072a9dfb4789994bebb9a333085ffa75530672fb6380df6c"}, - {file = "cuda_python-12.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cece998c45a35d222edca2db623948c9a3bc4fce6e3a4d6d98e6aa245d28a7ec"}, - {file = "cuda_python-12.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e9545b69cd3defba350ed9fb7d599c2c558646877a58f8a51ee2cc7a5ad43d"}, - {file = "cuda_python-12.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:22a09b8f0becad338e6e2c2cc97b72d25823517dd8a355009faf43f345ed7d04"}, -] - -[[package]] -name = "decorator" -version = "4.4.2" -description = "Decorators for Humans" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*" -files = [ - {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, - {file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"}, -] - -[[package]] -name = "face-alignment" -version = "1.3.5" -description = "Detector 2D or 3D face landmarks from Python" -optional = false -python-versions = ">=3" -files = [ - {file = "face_alignment-1.3.5-py2.py3-none-any.whl", hash = "sha256:f0d9d0f8276ff9cccb73f37665acf3e8721a09b758148d767116befa43a73945"}, - {file = "face_alignment-1.3.5.tar.gz", hash = "sha256:20e940a7c769c7c4cdfd1d4ef1212ec68468721b8df0c17bf93b29b37c61a071"}, -] - -[package.dependencies] -numba = "*" -numpy = "*" -opencv-python = "*" -scikit-image = "*" -scipy = ">=0.17" -torch = "*" -tqdm = "*" - -[[package]] -name = "facenet-pytorch" -version = "2.5.2" -description = 
"Pretrained Pytorch face detection and recognition models" -optional = false -python-versions = "*" -files = [ - {file = "facenet-pytorch-2.5.2.tar.gz", hash = "sha256:6188402b90f29b23f4de31834f275f8d8eea83d085c2845bad4bad80547b7b53"}, - {file = "facenet_pytorch-2.5.2-py3-none-any.whl", hash = "sha256:5d4be649f86f5a09837a5972b143ddc65e19016f52eef6e4ee6afb0044253b98"}, -] - -[package.dependencies] -numpy = "*" -pillow = "*" -requests = "*" -torchvision = "*" - -[[package]] -name = "ffmpeg-python" -version = "0.2.0" -description = "Python bindings for FFmpeg - with complex filtering support" -optional = false -python-versions = "*" -files = [ - {file = "ffmpeg-python-0.2.0.tar.gz", hash = "sha256:65225db34627c578ef0e11c8b1eb528bb35e024752f6f10b78c011f6f64c4127"}, - {file = "ffmpeg_python-0.2.0-py3-none-any.whl", hash = "sha256:ac441a0404e053f8b6a1113a77c0f452f1cfc62f6344a769475ffdc0f56c23c5"}, -] - -[package.dependencies] -future = "*" - -[package.extras] -dev = ["Sphinx (==2.1.0)", "future (==0.17.1)", "numpy (==1.16.4)", "pytest (==4.6.1)", "pytest-mock (==1.10.4)", "tox (==3.12.1)"] - -[[package]] -name = "filelock" -version = "3.15.4" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = 
"frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = 
"sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "fsspec" -version = "2024.6.1" -description = "File-system specification" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, -] - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -dev = ["pre-commit", "ruff"] -doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] -test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] -test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] -tqdm = ["tqdm"] - -[[package]] -name = "future" -version = "1.0.0" -description = "Clean single-source support for Python 3 and 2" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, - {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, -] - -[[package]] -name = "gevent" -version = "24.2.1" -description = "Coroutine-based network library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "gevent-24.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6f947a9abc1a129858391b3d9334c45041c08a0f23d14333d5b844b6e5c17a07"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde283313daf0b34a8d1bab30325f5cb0f4e11b5869dbe5bc61f8fe09a8f66f3"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1df555431f5cd5cc189a6ee3544d24f8c52f2529134685f1e878c4972ab026"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:14532a67f7cb29fb055a0e9b39f16b88ed22c66b96641df8c04bdc38c26b9ea5"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd23df885318391856415e20acfd51a985cba6919f0be78ed89f5db9ff3a31cb"}, - {file = "gevent-24.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ca80b121bbec76d7794fcb45e65a7eca660a76cc1a104ed439cdbd7df5f0b060"}, - {file = "gevent-24.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9913c45d1be52d7a5db0c63977eebb51f68a2d5e6fd922d1d9b5e5fd758cc98"}, - {file = "gevent-24.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:918cdf8751b24986f915d743225ad6b702f83e1106e08a63b736e3a4c6ead789"}, - {file = "gevent-24.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d5325ccfadfd3dcf72ff88a92fb8fc0b56cacc7225f0f4b6dcf186c1a6eeabc"}, - {file = "gevent-24.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:03aa5879acd6b7076f6a2a307410fb1e0d288b84b03cdfd8c74db8b4bc882fc5"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8bb35ce57a63c9a6896c71a285818a3922d8ca05d150fd1fe49a7f57287b836"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7f87c2c02e03d99b95cfa6f7a776409083a9e4d468912e18c7680437b29222c"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968581d1717bbcf170758580f5f97a2925854943c45a19be4d47299507db2eb7"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7899a38d0ae7e817e99adb217f586d0a4620e315e4de577444ebeeed2c5729be"}, - {file = "gevent-24.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f5e8e8d60e18d5f7fd49983f0c4696deeddaf6e608fbab33397671e2fcc6cc91"}, - {file = "gevent-24.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fbfdce91239fe306772faab57597186710d5699213f4df099d1612da7320d682"}, - {file = "gevent-24.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cdf66977a976d6a3cfb006afdf825d1482f84f7b81179db33941f2fc9673bb1d"}, - {file = "gevent-24.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1dffb395e500613e0452b9503153f8f7ba587c67dd4a85fc7cd7aa7430cb02cc"}, - {file = "gevent-24.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:6c47ae7d1174617b3509f5d884935e788f325eb8f1a7efc95d295c68d83cce40"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7cac622e11b4253ac4536a654fe221249065d9a69feb6cdcd4d9af3503602e0"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf5b9c72b884c6f0c4ed26ef204ee1f768b9437330422492c319470954bc4cc7"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5de3c676e57177b38857f6e3cdfbe8f38d1cd754b63200c0615eaa31f514b4f"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4faf846ed132fd7ebfbbf4fde588a62d21faa0faa06e6f468b7faa6f436b661"}, - {file = "gevent-24.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:368a277bd9278ddb0fde308e6a43f544222d76ed0c4166e0d9f6b036586819d9"}, - {file = "gevent-24.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f8a04cf0c5b7139bc6368b461257d4a757ea2fe89b3773e494d235b7dd51119f"}, - {file = "gevent-24.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d8d0642c63d453179058abc4143e30718b19a85cbf58c2744c9a63f06a1d388"}, - {file = "gevent-24.2.1-cp312-cp312-win_amd64.whl", 
hash = "sha256:94138682e68ec197db42ad7442d3cf9b328069c3ad8e4e5022e6b5cd3e7ffae5"}, - {file = "gevent-24.2.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:8f4b8e777d39013595a7740b4463e61b1cfe5f462f1b609b28fbc1e4c4ff01e5"}, - {file = "gevent-24.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141a2b24ad14f7b9576965c0c84927fc85f824a9bb19f6ec1e61e845d87c9cd8"}, - {file = "gevent-24.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9202f22ef811053077d01f43cc02b4aaf4472792f9fd0f5081b0b05c926cca19"}, - {file = "gevent-24.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2955eea9c44c842c626feebf4459c42ce168685aa99594e049d03bedf53c2800"}, - {file = "gevent-24.2.1-cp38-cp38-win32.whl", hash = "sha256:44098038d5e2749b0784aabb27f1fcbb3f43edebedf64d0af0d26955611be8d6"}, - {file = "gevent-24.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:117e5837bc74a1673605fb53f8bfe22feb6e5afa411f524c835b2ddf768db0de"}, - {file = "gevent-24.2.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:2ae3a25ecce0a5b0cd0808ab716bfca180230112bb4bc89b46ae0061d62d4afe"}, - {file = "gevent-24.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ceb59986456ce851160867ce4929edaffbd2f069ae25717150199f8e1548b8"}, - {file = "gevent-24.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2e9ac06f225b696cdedbb22f9e805e2dd87bf82e8fa5e17756f94e88a9d37cf7"}, - {file = "gevent-24.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:90cbac1ec05b305a1b90ede61ef73126afdeb5a804ae04480d6da12c56378df1"}, - {file = "gevent-24.2.1-cp39-cp39-win32.whl", hash = "sha256:782a771424fe74bc7e75c228a1da671578c2ba4ddb2ca09b8f959abdf787331e"}, - {file = "gevent-24.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:3adfb96637f44010be8abd1b5e73b5070f851b817a0b182e601202f20fa06533"}, - {file = "gevent-24.2.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:7b00f8c9065de3ad226f7979154a7b27f3b9151c8055c162332369262fc025d8"}, - {file = "gevent-24.2.1.tar.gz", hash = "sha256:432fc76f680acf7cf188c2ee0f5d3ab73b63c1f03114c7cd8a34cebbe5aa2056"}, -] - -[package.dependencies] -cffi = {version = ">=1.12.2", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} -greenlet = [ - {version = ">=3.0rc3", markers = "platform_python_implementation == \"CPython\" and python_version >= \"3.11\""}, - {version = ">=2.0.0", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""}, -] -"zope.event" = "*" -"zope.interface" = "*" - -[package.extras] -dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"] -docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"] -monitor = ["psutil (>=5.7.0)"] -recommended = ["cffi (>=1.12.2)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"] -test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests"] - -[[package]] -name = "geventhttpclient" -version = "2.0.2" -description = "http client library for gevent" -optional = false -python-versions = "*" -files = [ - {file = "geventhttpclient-2.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd76acdc7e7ee5c54c7b279f806b28957a6b092f79c40db34adcfd972749343c"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:320a2c756d8a4f296de370476a1515485c186d9e22c3fc29e04f8f743a7d47bb"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:36d3345c6585b09738195a7c45d279a87ccbab0350f1cce3679d3f0dce8577a1"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:407d54499556c2741b93691b86da93232590b013f4a0b773327d766fe3e5c0a9"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcf325131b0e4600b793643108cd85dddd66bbf532fd2eb498be5727ef532a1e"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5841dd02e6f792a4ef15dbd04fefe620c831ba0b78105808160bb779a31af4"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2ba69422d4e8670dd99803b1313ba574a4d41f52e92b512af51068c9c577bdc1"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e3af579c6b46b9caa515a8baf6a2cadeafcd1d41ad22ca5712851f074a40b47"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6ff7fc19f9a4fdd54a2b1c106a705ea2c679fa049685ed763051d417725bdab1"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-win32.whl", hash = "sha256:eec7c52e8eb817674a193e0124486b507215d9e86d34f2638bf9a9292d16f815"}, - {file = "geventhttpclient-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:0e9f7283c01d970e643d89da81127869a8d94bb7a0081020dcad5b590bc007c4"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5ceb492d43a659b895794999dc40d0e7c23b1d41dd34040bbacd0dc264b57d5b"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95959c201d3151fa8f57e0f1ce184476d1173996bdde41dc7d600006023dc5be"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:31c7febba298ecf44838561074a3fb7a01523adca286469b5a82dcc90e8d6a07"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:996c5f453d810b3c592160193d6832a065cca0112e92adc74e62df0e4c564df6"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f817e226c02b5a71d86de3772d6accdf250288d1e6825e426c713759830162d"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c55b7ac0ba0e1e1afbf297b7608f0b3a0bbc34fb4b0c19b7869f32a77ddc6209"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6775bc81e25c48fa58b034444aecfa508b0c3d1bc1e4ae546cc17661be1f51aa"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a0156882c73537bbbbc7c693ae44c9808119963174078692613ffa4feea21fcf"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ebb582a291c4c5daaac2ea115b413f4be86874baa60def44d333301cee17bd7"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-win32.whl", hash = "sha256:716f1f72f50b841daf9c9511a01fc31a030866510a11863f27741e26e4f556a7"}, - {file = "geventhttpclient-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:777fcdb72077dfbf70516ecb9e9022246dd337b83a4c1e96f17f3ab9e15f4547"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:379d90d8b1fcdda94e74d693806e0b0116c0610504e7f62d5576bac738dc66a5"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:00b7b2b836294c091c53789a469c5671202d79420b5191931df4e3a767d607fa"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d075355862d7726eb3436f0136fce7650c884f2d04eaae7a39fed3aad9798bc"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa7b1a27f950d209fe223a97906fe41312dc12c92372424639b8a9b96f1adf91"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fe4e06313aad353b103950780b050d3958000464cc732d621ff8ea3cacbd2bc4"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:84d7be660b6bc53dd53e3f46b3bc5d275972a8116bd183a77139bb4d9d6d9fb1"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:81f839d6becd664d0972b488422f5dc821f8ad2f2196d53aa5e4d799a3a35a66"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:e707f62271a093e6e3af6f1bbd8cc398b414b8c508fe6b15505dd8e76c4409ac"}, - {file = "geventhttpclient-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:28d7655d1d50bc75ece683a0ae8faf978821d4aeae358d77b59371548db07f1e"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58877b4440a580063571a23fbc616aed7c735c6bf9ef525c5129783df8b6966"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57c993c4b2bea551c4a71b75ae1e172e9f3e4352f704ff1b619a0f16aa762f76"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f67e789e31c7b1ce440cd1465dcdefeca29ba6108735eac0b1a593d3a55b7f"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3326e115ec7e7ce95a5d0d47698e8f3584944c4c434a7404937d56b17136b8"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ef328ee3e7dca5055b833fdf3c181647a335abf0249947b27f5df2d95390198c"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:27049ea40e3b559eee380310272aaa9b7c19e73c1d9e51e2ec137362be2caa70"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b88a10538341e33fed1682c0dd4579c655d49db5863e7456583085a1cd6bd9d4"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:d52aba2c38420b3fc518188449f1c2a46b1a99adf1c0266c68e72ee0422cd0fa"}, - {file = "geventhttpclient-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:3648626ca58ea4b340e695d78e5d533e6b8be78d375edbd42ff188bc3447e095"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fcf96e212b55b93490f3a5fcdfe7a2ef4995a0d13b7d9df398b11e319b7a86b1"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e9f2ff09706e3a64a99886d5f2595f3bf364821bc609f2865dbc3e499e21a36"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:721c3075897bfc81e918066f16ae3d1a88c7bb14eeeb831a4f89ea636474643e"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91615fed7931acd49cfe5fc30984acd5411dc1f2643b1544c879d1a537233c6d"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7adaa29e5699dea54e0224d1d2d9d8869668d8ad79f5b89433ff9c46f9424a6c"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9be5000ba57336a90b438782117c1e43205f51f49aa9b1499a82e210e8431b11"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:12d271cc53486efb3716e99855dc5cb84f2cd3fc9f3243721747bb39ec0fff8a"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b9c0c6b75b3905000d2490dc64b4c98a8bac155efbc0ff8917ac082ae0bad261"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e956a457d8831dc81d6f046ab09ebeec680f9a1e9c07e25a1906e77b287918ee"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-win32.whl", hash = "sha256:bc46d5479673dfb293ea428c057d2e23e48ebef5c5d44587cdbaada7f87553e4"}, - {file = "geventhttpclient-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:f44153e4b3ef9b901edcd14be54145a0058bf5fa371b3e583153865fac866245"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ebf98db9435824cf0b80b5247be6c88b20bfafd6249f7ebaabb85297da37e380"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c8b7298eb1ebd015257bf4503e34f5fbbe64bd83324140f76b511046aba5a0d5"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:60b81a6d4e65db7c1a5350c9fb72ebf800b478849a7e8020d1ab93af237a3747"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad6c2fcbc3733785bd3b8c2bb43d1f605f9085b0a8b70ce354d198f37143f884"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94edb022fa50d576cf63f6dd0c437c1acd24a719872a5935991aaf08f8e88cb2"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ca459cedb3827d960362e05ea3a4ae600a6d0d93de77eac2ac0f79828e5e18c"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7551b6db860b56411de1f96618e91b54f65e1a7be8d10255bd1adfb738bb6ee5"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bcb7e061c243308d9a44b02de5298001e917f1636a9f270c10da86601fcc8dfa"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:96922d170ef8933f4c20036e8d70d4fbe861f54c543e32e7459ebdbaafa65a2e"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ebb3c993903d40fd4bb1f3e55b84c62c8fc1d14433ae6d4d477dd9a325354c94"}, - {file = "geventhttpclient-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:dbccf1ba155dea3ea99ba0e67a835c05b4303f05298e85f5bb2a46700ccdf092"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8770b8ab9e8c31d2aaf8a6fbc63fbb7239c58db10bb49cee191ca5c141c61542"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daff1e977fccf98f27266d3891afdc101f1d705a48331754909e960bcae83f8a"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2435e0f2a60e00d977822ec4c12e7851deb7aa49a23d32d648e72c641aae3b05"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:09acd03d0a8c1bb7d5a1cb6fcb77aaa19a907c1b4915ab58da5d283675edb0a5"}, - {file = "geventhttpclient-2.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:5d0813d97050446dab2fb243312e6c446e4ef5e9591befd597ef8f2887f8e2a8"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:852da9bb0fc792cdca5ffc9327490094783e42415494b3569e5d532615027439"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79304a63a9d0512f2757c5862487b332b18a9c85feebecf6ebc3526c6dd1ba2"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c1c783fce45f16db448d7e34864f1e9c22fe60a7780d2c1c14edbb1fb7262e"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77c407c2b4bea817c6f752502db4ab0e9f9465b4fb85b459d1332b5f93a3096c"}, - {file = "geventhttpclient-2.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4f0d70a83ef4ab93102c6601477c13e9cdbc87205e5237fbf5797e30dc9d3ee8"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b03f298ec19b8a4717cce8112fe30322c9e5bfada84dde61a1a44d1eeffc1d3c"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2dc94b9a23eb6744a8c729aec2b1cdc4e39acf1d8f16ea85a62810aa6b2cae5"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:805554594bb29231fd990cc2cbbe493d223d76a6085fec891dd76bb4e0928933"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb23527d98f626ca7a4e8961ed9bdc6aed3388de306614c69a133b34262460f4"}, - {file = "geventhttpclient-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a594ab319872a38fb7f16be4cfb107d3c63c43a081f2abe241834e9877f27401"}, - {file = "geventhttpclient-2.0.2.tar.gz", hash = "sha256:8135a85200b170def7293d01dd1557931fcd1bec1ac78c52ad7cedd22368b9ba"}, -] - -[package.dependencies] -brotli = "*" -certifi = "*" -gevent = ">=0.13" -six = "*" - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, 
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "grpcio" -version = "1.65.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.65.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:66ea0ca6108fcb391444bb7b37d04eac85bfaea1cfaf16db675d3734fc74ca1b"}, - {file = "grpcio-1.65.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:45d371dc4436fdcc31677f75b3ebe6175fbf0712ced49e0e4dfc18bbaf50f5a7"}, - {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:02dbbe113ec48581da07b7ddf52bfd49f5772374c4b5e36ea25131ce00b4f4f3"}, - {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c9ee7b8f1ac82cc24f223cd7ec803c17079f90e63022d3e66c5e53fff0afb99"}, - {file = "grpcio-1.65.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da927f8a44e42837ae0027a3a063c85e2b26491d2babd4554e116f66fd46045d"}, - {file = "grpcio-1.65.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9916ea670a589f95f2453a4a5040294ace096271c126e684a1e45e61af76c988"}, - {file = "grpcio-1.65.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c46114787c5f530e845d2781f914600aade04b4f132dd012efb31bc4f76a72bb"}, - {file = "grpcio-1.65.0-cp310-cp310-win32.whl", hash = "sha256:1362d94ac9c05b202736180d23296840e00f495859b206261e6ed03a6d41978b"}, - {file = "grpcio-1.65.0-cp310-cp310-win_amd64.whl", hash = "sha256:00ed0828980009ce852d98230cdd2d5a22a4bcb946b5a0f6334dfd8258374cd7"}, - {file = "grpcio-1.65.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:25303f3747522252dd9cfcbacb88d828a36040f513e28fba17ee6184ebc3d330"}, - {file = "grpcio-1.65.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a2b368717dd8e0f6cb7e412d3b3bfb0012f61c04b2f76dbed669b0f5cf3fb0c"}, - {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:93c41fb74c576dc0130b190a5775197282115c6abbe1d913d42d9a2f9d98fdae"}, - {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34eb4fb9ef4d11ea741d264916d1b31a9e169d539a6f1c8300e04c493eec747e"}, - {file = "grpcio-1.65.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55c41272f9d7d3503e3e3e93f3f98589f07075eebd24e1c291a1df2e8ef40a49"}, - {file = "grpcio-1.65.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c275bac926754022c89ef03f16470f65b811e2cc25f2167d365564ad43e31001"}, - {file = "grpcio-1.65.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b02db2a59071f4d05cfc4d0c972759778d27e1d3347f22ca178b91117ad10541"}, - 
{file = "grpcio-1.65.0-cp311-cp311-win32.whl", hash = "sha256:ec9f41b9b0eb6407a6edb21bc22cb32e03cae76cde9c1d8bb151ed77c2c5af94"}, - {file = "grpcio-1.65.0-cp311-cp311-win_amd64.whl", hash = "sha256:3efc8b0600870f5e518dd2738188b3ba7b1bb2668244c9a2a8c4debda4ffe62b"}, - {file = "grpcio-1.65.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:d787abafafa9ed71e17220d4178c883abdb380e0484bd8965cb2e06375c7495b"}, - {file = "grpcio-1.65.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:52347f21d6ec77d7e7e4d5037f5e8ac0a0c851856d9459f9f95b009c2c740b4a"}, - {file = "grpcio-1.65.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b16e1cd9b9cb9ac942cb20b7a2b1c5d35b9e61017e2998bf242a6f7748071795"}, - {file = "grpcio-1.65.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89bc9c8c6743a48f115fea8f3fada76be269d1914bf636e5fdb7cec9cdf192bc"}, - {file = "grpcio-1.65.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5a2ae900e6423438c4a9a5be38e9228621340a18333371215c0419d24a254ef"}, - {file = "grpcio-1.65.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4f451091ddd28f00c655f0b1e208cca705d40e4fde56a3cf849fead61a700d10"}, - {file = "grpcio-1.65.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4e30cd885e02abb98d6b0d5beb6259a567b0ce1416c498ec815fe383adb77864"}, - {file = "grpcio-1.65.0-cp312-cp312-win32.whl", hash = "sha256:9a9a0ce10a07923ebd48c056060052ebddfbec3193cdd32207af358ef317b00a"}, - {file = "grpcio-1.65.0-cp312-cp312-win_amd64.whl", hash = "sha256:87d9350ffe1a84b7441db7c70fdb4e51269a379f7a95d696d0d133831c4f9a19"}, - {file = "grpcio-1.65.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:0c504b30fc2fba143d9254e0240243b5866df9b7523162448797f4b21b5f30d5"}, - {file = "grpcio-1.65.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:480be4d41ceb5a7f22ecfc8db1ab68aeb58cc1a2da0865a91917d3cd0438dac7"}, - {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:984a1627b50d5df4a24120302ca95adb5139ba1c40354ba258fc2913666d8ee7"}, - {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f242956c0f4985dfcc920cd251cd7a899ca168e157e98c9b74a688657e813ad6"}, - {file = "grpcio-1.65.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ea93f570b2341c69635b8a333afb99fb4d5584f26a9cc94f06e56c943648aab"}, - {file = "grpcio-1.65.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1bebefd76517a43d0e77a5dcd61a8b69e9775340d856a0b35c6368ae628f7714"}, - {file = "grpcio-1.65.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:356d10a491a92a08c21aef806379f7b020f591c23580e3d29aeeb59d45908c86"}, - {file = "grpcio-1.65.0-cp38-cp38-win32.whl", hash = "sha256:c3294fd3ef9faa1fe14ad15d72dd7d2ee9fee6d3bd29a08c53e59a3c94de9cc9"}, - {file = "grpcio-1.65.0-cp38-cp38-win_amd64.whl", hash = "sha256:a2defc49c984550f25034e88d17a7e69dba6deb2b981d8f56f19b3aaa788ff30"}, - {file = "grpcio-1.65.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:b73022222ed4bf718d3d8527a9b88b162074a62c7530d30f4e951b56304b0f19"}, - {file = "grpcio-1.65.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16e0f789158ecc8309e0a2f16cb8c5e4753f351a7673aab75f42783c83f1e38b"}, - {file = "grpcio-1.65.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:cb0bd8bfba21fe0318317bf11687c67a3f8ce726369c0b3ccf4e6607fc5bc5f2"}, - {file = "grpcio-1.65.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1096f0fa79ec601aefd71685d3a610cdde96274c38cd8adcef972660297669a"}, - {file = 
"grpcio-1.65.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e576a88ce82fea70e68c548aceb5cd560c27da50091581996858bbbe01230c83"}, - {file = "grpcio-1.65.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab70bd1ccb05ef373b691a9b9985289d8b2cf63c704471f5ee132e228d351af5"}, - {file = "grpcio-1.65.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03eab632a8ce8dba00d97482d2821bf752a7c3cb4dc051be6c587ad3ca1c3e6d"}, - {file = "grpcio-1.65.0-cp39-cp39-win32.whl", hash = "sha256:f19bb85795ca82e007be427e7b6ac5e730023ffbab69d39ddeb1b84c6339df16"}, - {file = "grpcio-1.65.0-cp39-cp39-win_amd64.whl", hash = "sha256:dbd7eeafa67d8e403ac61caa31ebda2861435dcfd7bb7953c4ef05ad2ecf74bf"}, - {file = "grpcio-1.65.0.tar.gz", hash = "sha256:2c7891f66daefc80cce1bed6bc0c2802d26dac46544ba1be79c4e7d85661dd73"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.65.0)"] - -[[package]] -name = "huggingface-hub" -version = "0.23.4" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "huggingface_hub-0.23.4-py3-none-any.whl", hash = "sha256:3a0b957aa87150addf0cc7bd71b4d954b78e749850e1e7fb29ebbd2db64ca037"}, - {file = "huggingface_hub-0.23.4.tar.gz", hash = "sha256:35d99016433900e44ae7efe1c209164a5a81dbbcd53a52f99c281dcd7ce22431"}, -] - -[package.dependencies] -filelock = "*" -fsspec = ">=2023.5.0" -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = ">=4.42.1" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors", "torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = 
"sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "imageio" -version = "2.13.5" -description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." -optional = false -python-versions = ">=3.5" -files = [ - {file = "imageio-2.13.5-py3-none-any.whl", hash = "sha256:a3a18d5d01732557247fba5658d7f75425e97ce49c8fe2cd81bd348f5c71ffb2"}, - {file = "imageio-2.13.5.tar.gz", hash = "sha256:c7ec2be58e401b6eaa838f8eaf8368ed54b2de4a1b001fe6551644f1a30a843d"}, -] - -[package.dependencies] -numpy = "*" -pillow = ">=8.3.2" - -[package.extras] -build = ["wheel"] -dev = ["black", "flake8", "invoke", "pytest", "pytest-cov"] -docs = ["numpydoc", "pydata-sphinx-theme", "sphinx"] -ffmpeg = ["imageio-ffmpeg", "psutil"] -fits = ["astropy"] -full = ["astropy", "black", "flake8", "gdal", "imageio-ffmpeg", "invoke", "itk", "numpydoc", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "sphinx", "tifffile", "wheel"] -gdal = ["gdal"] -itk = ["itk"] -linting = ["black", "flake8"] -test = ["invoke", "pytest", "pytest-cov"] -tifffile = ["tifffile"] - -[[package]] -name = "imageio-ffmpeg" -version = "0.4.5" -description = "FFMPEG wrapper for Python" -optional = false -python-versions = ">=3.4" -files = [ - {file = "imageio-ffmpeg-0.4.5.tar.gz", hash = "sha256:f2ea4245a2adad25dedf98d343159579167e549ac8c4691cef5eff980e20c139"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:266601aab7619acf6ff78cd5ba78b5a593a1119a96d266d33b88bfcd01bbd3ca"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-manylinux2010_x86_64.whl", hash = "sha256:f127b8cdd842e8398de5f2aef23c687ae75d4d964e1df2ea3a9ff03e92a370e7"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:db4d318f640419037a0df29bb11b1022f2f8094c90b4aac8affc7177b8ce4641"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-win32.whl", hash = "sha256:39a9ab4326bdf5eae3457961dfdfb4317078659ebe4e6980914ac897a462aeb2"}, - {file = "imageio_ffmpeg-0.4.5-py3-none-win_amd64.whl", hash = "sha256:d2ba8339eecc02fa73a6b85c34654c49a7c78d732a1ac76478d11224e6cfa902"}, -] - -[[package]] -name = "intel-openmp" -version = "2021.4.0" -description = "Intel OpenMP* Runtime Library" -optional = false -python-versions = "*" -files = [ - {file = "intel_openmp-2021.4.0-py2.py3-none-macosx_10_15_x86_64.macosx_11_0_x86_64.whl", hash = "sha256:41c01e266a7fdb631a7609191709322da2bbf24b252ba763f125dd651bcc7675"}, - {file = "intel_openmp-2021.4.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:3b921236a38384e2016f0f3d65af6732cf2c12918087128a9163225451e776f2"}, - {file = "intel_openmp-2021.4.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:e2240ab8d01472fed04f3544a878cda5da16c26232b7ea1b59132dbfb48b186e"}, - {file = "intel_openmp-2021.4.0-py2.py3-none-win32.whl", hash = "sha256:6e863d8fd3d7e8ef389d52cf97a50fe2afe1a19247e8c0d168ce021546f96fc9"}, - {file = "intel_openmp-2021.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:eef4c8bcc8acefd7f5cd3b9384dbf73d59e2c99fc56545712ded913f43c4a94f"}, -] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "lazy-loader" -version = "0.4" -description = "Makes it easy to load subpackages and functions on demand." -optional = false -python-versions = ">=3.7" -files = [ - {file = "lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc"}, - {file = "lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -dev = ["changelist (==0.5)"] -lint = ["pre-commit (==3.7.0)"] -test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] - -[[package]] -name = "librosa" -version = "0.8.1" -description = "Python module for audio and music processing" -optional = false -python-versions = ">=3.6" -files = [ - {file = "librosa-0.8.1-py3-none-any.whl", hash = "sha256:fd381e2d7067d4d4cf7691f2ef3620ef62a8aa6445dcf407e3328254692f742a"}, - {file = "librosa-0.8.1.tar.gz", hash = "sha256:c53d05e768ae4a3e553ae21c2e5015293e5efbfd5c12d497f1104cb519cca6b3"}, -] - -[package.dependencies] -audioread = ">=2.0.0" -decorator = ">=3.0.0" -joblib = ">=0.14" -numba = ">=0.43.0" -numpy = ">=1.15.0" -packaging = ">=20.0" -pooch = ">=1.0" -resampy = ">=0.2.2" -scikit-learn = ">=0.14.0,<0.19.0 || >0.19.0" -scipy = ">=1.0.0" -soundfile = ">=0.10.2" - -[package.extras] -display = ["matplotlib (>=1.5)"] -docs = ["matplotlib (>=2.0.0,<3.3)", "numba (<0.50)", "numpydoc", "presets", "sphinx (!=1.3.1)", "sphinx-gallery (>=0.7)", "sphinx-multiversion (>=0.2.3)", "sphinx-rtd-theme (==0.5.*)", "spinxcontrib-svg2pdfconverter"] -tests = ["contextlib2", "matplotlib (>=3.0)", "pytest", "pytest-cov", "pytest-mpl", "samplerate", "soxr"] - -[[package]] -name = "llvmlite" -version = "0.43.0" -description = "lightweight wrapper around basic LLVM functionality" -optional = false -python-versions = ">=3.9" -files = [ - {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, - {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, - {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, - {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, - {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, - {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, - {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, - {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, - {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, - {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, - {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, - {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, - {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, - {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, - {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, - {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, - {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, - {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, - {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, - {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, - {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, -] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "mkl" -version = "2021.4.0" -description = "Intel® oneAPI Math Kernel Library" -optional = false -python-versions = "*" -files = [ - {file = "mkl-2021.4.0-py2.py3-none-macosx_10_15_x86_64.macosx_11_0_x86_64.whl", hash = "sha256:67460f5cd7e30e405b54d70d1ed3ca78118370b65f7327d495e9c8847705e2fb"}, - {file = "mkl-2021.4.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:636d07d90e68ccc9630c654d47ce9fdeb036bb46e2b193b3a9ac8cfea683cce5"}, - {file = "mkl-2021.4.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:398dbf2b0d12acaf54117a5210e8f191827f373d362d796091d161f610c1ebfb"}, - {file = "mkl-2021.4.0-py2.py3-none-win32.whl", hash = "sha256:439c640b269a5668134e3dcbcea4350459c4a8bc46469669b2d67e07e3d330e8"}, - {file = "mkl-2021.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:ceef3cafce4c009dd25f65d7ad0d833a0fbadc3d8903991ec92351fe5de1e718"}, -] - -[package.dependencies] -intel-openmp = "==2021.*" -tbb = "==2021.*" - -[[package]] -name = "moviepy" -version = "1.0.3" -description = "Video editing with Python" -optional = false -python-versions = "*" -files = [ - {file = "moviepy-1.0.3.tar.gz", hash = 
"sha256:2884e35d1788077db3ff89e763c5ba7bfddbd7ae9108c9bc809e7ba58fa433f5"}, -] - -[package.dependencies] -decorator = ">=4.0.2,<5.0" -imageio = {version = ">=2.5,<3.0", markers = "python_version >= \"3.4\""} -imageio_ffmpeg = {version = ">=0.2.0", markers = "python_version >= \"3.4\""} -numpy = {version = ">=1.17.3", markers = "python_version > \"2.7\""} -proglog = "<=1.0.0" -requests = ">=2.8.1,<3.0" -tqdm = ">=4.11.2,<5.0" - -[package.extras] -doc = ["Sphinx (>=1.5.2,<2.0)", "numpydoc (>=0.6.0,<1.0)", "pygame (>=1.9.3,<2.0)", "sphinx_rtd_theme (>=0.1.10b0,<1.0)"] -optional = ["matplotlib (>=2.0.0,<3.0)", "opencv-python (>=3.0,<4.0)", "scikit-image (>=0.13.0,<1.0)", "scikit-learn", "scipy (>=0.19.0,<1.5)", "youtube_dl"] -test = ["coverage (<5.0)", "coveralls (>=1.1,<2.0)", "pytest (>=3.0.0,<4.0)", "pytest-cov (>=2.5.1,<3.0)", "requests (>=2.8.1,<3.0)"] - -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - 
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = 
"multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "networkx" -version = "3.3" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.10" -files = [ - {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, - {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, -] - -[package.extras] -default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "numba" -version = "0.60.0" -description = "compiling Python code using LLVM" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, - {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, - {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, - {file 
= "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, - {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, - {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, - {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, - {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, - {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, - {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, - {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, - {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, - {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, - {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, - {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, -] - -[package.dependencies] -llvmlite = "==0.43.*" -numpy = ">=1.22,<2.1" - -[[package]] -name = "numpy" -version = "1.22.4" -description = "NumPy is the fundamental package for array computing with Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.22.4-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9ead61dfb5d971d77b6c131a9dbee62294a932bf6a356e48c75ae684e635b3"}, - {file = "numpy-1.22.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1ce7ab2053e36c0a71e7a13a7475bd3b1f54750b4b433adc96313e127b870887"}, - {file = "numpy-1.22.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7228ad13744f63575b3a972d7ee4fd61815b2879998e70930d4ccf9ec721dce0"}, - {file = "numpy-1.22.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43a8ca7391b626b4c4fe20aefe79fec683279e31e7c79716863b4b25021e0e74"}, - {file = "numpy-1.22.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a911e317e8c826ea632205e63ed8507e0dc877dcdc49744584dfc363df9ca08c"}, - {file = "numpy-1.22.4-cp310-cp310-win32.whl", hash = "sha256:9ce7df0abeabe7fbd8ccbf343dc0db72f68549856b863ae3dd580255d009648e"}, - {file = "numpy-1.22.4-cp310-cp310-win_amd64.whl", hash = "sha256:3e1ffa4748168e1cc8d3cde93f006fe92b5421396221a02f2274aab6ac83b077"}, - {file = "numpy-1.22.4-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:59d55e634968b8f77d3fd674a3cf0b96e85147cd6556ec64ade018f27e9479e1"}, - {file = "numpy-1.22.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c1d937820db6e43bec43e8d016b9b3165dcb42892ea9f106c70fb13d430ffe72"}, - {file = "numpy-1.22.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4c5d5eb2ec8da0b4f50c9a843393971f31f1d60be87e0fb0917a49133d257d6"}, - {file = "numpy-1.22.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64f56fc53a2d18b1924abd15745e30d82a5782b2cab3429aceecc6875bd5add0"}, - {file = "numpy-1.22.4-cp38-cp38-win32.whl", hash = "sha256:fb7a980c81dd932381f8228a426df8aeb70d59bbcda2af075b627bbc50207cba"}, - {file = "numpy-1.22.4-cp38-cp38-win_amd64.whl", hash = "sha256:e96d7f3096a36c8754207ab89d4b3282ba7b49ea140e4973591852c77d09eb76"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:4c6036521f11a731ce0648f10c18ae66d7143865f19f7299943c985cdc95afb5"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b89bf9b94b3d624e7bb480344e91f68c1c6c75f026ed6755955117de00917a7c"}, - {file = "numpy-1.22.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d487e06ecbf1dc2f18e7efce82ded4f705f4bd0cd02677ffccfb39e5c284c7e"}, - {file = "numpy-1.22.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3eb268dbd5cfaffd9448113539e44e2dd1c5ca9ce25576f7c04a5453edc26fa"}, - {file = "numpy-1.22.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37431a77ceb9307c28382c9773da9f306435135fae6b80b62a11c53cfedd8802"}, - {file = "numpy-1.22.4-cp39-cp39-win32.whl", hash = "sha256:cc7f00008eb7d3f2489fca6f334ec19ca63e31371be28fd5dad955b16ec285bd"}, - {file = "numpy-1.22.4-cp39-cp39-win_amd64.whl", hash = "sha256:f0725df166cf4785c0bc4cbfb320203182b1ecd30fee6e541c8752a92df6aa32"}, - {file = "numpy-1.22.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0791fbd1e43bf74b3502133207e378901272f3c156c4df4954cad833b1380207"}, - {file = "numpy-1.22.4.zip", hash = "sha256:425b390e4619f58d8526b3dcf656dde069133ae5c240229821f01b5f44ea07af"}, -] - -[[package]] -name = "nvidia-cublas-cu12" -version = "12.1.3.1" -description = "CUBLAS native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = 
"sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.1.105" -description = "CUDA profiling tools runtime libs." -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.1.105" -description = "NVRTC native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.1.105" -description = "CUDA Runtime native Libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "8.9.2.26" -description = "cuDNN runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.0.2.54" -description = "CUFFT native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, -] - -[[package]] -name = "nvidia-curand-cu12" -version = "10.3.2.106" -description = "CURAND native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.4.5.107" -description = "CUDA solver native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" 
-nvidia-cusparse-cu12 = "*" -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparse-cu12" -version = "12.1.0.106" -description = "CUSPARSE native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, -] - -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.20.5" -description = "NVIDIA Collective Communication Library (NCCL) Runtime" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.5.82" -description = "Nvidia JIT LTO Library" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f9b37bc5c8cf7509665cb6ada5aaa0ce65618f2332b7d3e78e9790511f111212"}, - {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-win_amd64.whl", hash = "sha256:e782564d705ff0bf61ac3e1bf730166da66dd2fe9012f111ede5fc49b64ae697"}, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.1.105" -description = "NVIDIA Tools Extension" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, -] - -[[package]] -name = "nvidia-pytriton" -version = "0.4.2" -description = "PyTriton - Flask/FastAPI-like interface to simplify Triton's deployment in Python environments." 
-optional = false -python-versions = "<4,>=3.8" -files = [ - {file = "nvidia_pytriton-0.4.2-py3-none-manylinux_2_35_aarch64.whl", hash = "sha256:0d204fa71774c21768c8f985d98aa55d80945fa7066b189e8885035517635956"}, - {file = "nvidia_pytriton-0.4.2-py3-none-manylinux_2_35_x86_64.whl", hash = "sha256:8f79e8bda28961a49d5c64474c17664e10b1bb08018b7ee7bcbd81bcff86e266"}, -] - -[package.dependencies] -numpy = ">=1.21,<2.0" -protobuf = ">=3.7.0" -pyzmq = ">=23.0,<24.0" -sh = ">=1.14,<2.0" -tritonclient = {version = ">=2.39,<3.0", extras = ["all"]} -typing-inspect = ">=0.6.0,<0.7.0" -wrapt = ">=1.11.0" - -[package.extras] -dev = ["black (>=22.8)", "build (>=0.8,<1.0.0)", "ipython (>=7.16)", "isort (>=5.10)", "nvidia-pytriton[doc]", "nvidia-pytriton[test]", "pip (>=21.3)", "pudb (>=2022.1.3)", "twine (>=4.0)"] -doc = ["GitPython (>=3.1.30)", "mike (>=2.0.0)", "mkdocs-htmlproofer-plugin (>=0.8.0)", "mkdocs-material (>=8.5.6)", "mkdocstrings[python] (>=0.19.0)"] -test = ["alt-pytest-asyncio (>=0.7,<1.0)", "pre-commit (>=2.20.0)", "psutil (>=5.1,<6.0)", "py-spy (>=0.3,<1.0)", "pytest (>=7.2,<8.0)", "pytest-codeblocks (>=0.16,<1.0)", "pytest-mock (>=3.8,<4.0)", "pytest-timeout (>=2.1,<3.0)", "pytype (!=2021.11.18,!=2022.2.17)", "tox (>=3.23.1)", "tqdm (>=4.64.1)"] - -[[package]] -name = "opencv-python" -version = "4.7.0.72" -description = "Wrapper package for OpenCV python bindings." -optional = false -python-versions = ">=3.6" -files = [ - {file = "opencv-python-4.7.0.72.tar.gz", hash = "sha256:3424794a711f33284581f3c1e4b071cfc827d02b99d6fd9a35391f517c453306"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:d4f8880440c433a0025d78804dda6901d1e8e541a561dda66892d90290aef881"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:7a297e7651e22eb17c265ddbbc80e2ba2a8ff4f4a1696a67c45e5f5798245842"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd08343654c6b88c5a8c25bf425f8025aed2e3189b4d7306b5861d32affaf737"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebfc0a3a2f57716e709028b992e4de7fd8752105d7a768531c4f434043c6f9ff"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-win32.whl", hash = "sha256:eda115797b114fc16ca6f182b91c5d984f0015c19bec3145e55d33d708e9bae1"}, - {file = "opencv_python-4.7.0.72-cp37-abi3-win_amd64.whl", hash = "sha256:812af57553ec1c6709060c63f6b7e9ad07ddc0f592f3ccc6d00c71e0fe0e6376"}, -] - -[package.dependencies] -numpy = {version = ">=1.22.0", markers = "python_version >= \"3.11\""} - -[[package]] -name = "opencv-python" -version = "4.10.0.84" -description = "Wrapper package for OpenCV python bindings." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "opencv-python-4.10.0.84.tar.gz", hash = "sha256:72d234e4582e9658ffea8e9cae5b63d488ad06994ef12d81dc303b17472f3526"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:fc182f8f4cda51b45f01c64e4cbedfc2f00aff799debebc305d8d0210c43f251"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:71e575744f1d23f79741450254660442785f45a0797212852ee5199ef12eed98"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09a332b50488e2dda866a6c5573ee192fe3583239fb26ff2f7f9ceb0bc119ea6"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ace140fc6d647fbe1c692bcb2abce768973491222c067c131d80957c595b71f"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-win32.whl", hash = "sha256:2db02bb7e50b703f0a2d50c50ced72e95c574e1e5a0bb35a8a86d0b35c98c236"}, - {file = "opencv_python-4.10.0.84-cp37-abi3-win_amd64.whl", hash = "sha256:32dbbd94c26f611dc5cc6979e6b7aa1f55a64d6b463cc1dcd3c95505a63e48fe"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""}, - {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandas" -version = "1.3.5" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "pandas-1.3.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:62d5b5ce965bae78f12c1c0df0d387899dd4211ec0bdc52822373f13a3a022b9"}, - {file = "pandas-1.3.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adfeb11be2d54f275142c8ba9bf67acee771b7186a5745249c7d5a06c670136b"}, - {file = "pandas-1.3.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a8c055d58873ad81cae290d974d13dd479b82cbb975c3e1fa2cf1920715296"}, - {file = "pandas-1.3.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd541ab09e1f80a2a1760032d665f6e032d8e44055d602d65eeea6e6e85498cb"}, - {file = "pandas-1.3.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2651d75b9a167cc8cc572cf787ab512d16e316ae00ba81874b560586fa1325e0"}, - {file = "pandas-1.3.5-cp310-cp310-win_amd64.whl", hash = "sha256:aaf183a615ad790801fa3cf2fa450e5b6d23a54684fe386f7e3208f8b9bfbef6"}, - {file = "pandas-1.3.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:344295811e67f8200de2390093aeb3c8309f5648951b684d8db7eee7d1c81fb7"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:552020bf83b7f9033b57cbae65589c01e7ef1544416122da0c79140c93288f56"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cce0c6bbeb266b0e39e35176ee615ce3585233092f685b6a82362523e59e5b4"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7d28a3c65463fd0d0ba8bbb7696b23073efee0510783340a44b08f5e96ffce0c"}, - {file = "pandas-1.3.5-cp37-cp37m-win32.whl", hash = "sha256:a62949c626dd0ef7de11de34b44c6475db76995c2064e2d99c6498c3dba7fe58"}, - {file = "pandas-1.3.5-cp37-cp37m-win_amd64.whl", hash = "sha256:8025750767e138320b15ca16d70d5cdc1886e8f9cc56652d89735c016cd8aea6"}, - {file = "pandas-1.3.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fe95bae4e2d579812865db2212bb733144e34d0c6785c0685329e5b60fcb85dd"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f261553a1e9c65b7a310302b9dbac31cf0049a51695c14ebe04e4bfd4a96f02"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6dbec5f3e6d5dc80dcfee250e0a2a652b3f28663492f7dab9a24416a48ac39"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3bc49af96cd6285030a64779de5b3688633a07eb75c124b0747134a63f4c05f"}, - {file = "pandas-1.3.5-cp38-cp38-win32.whl", hash = "sha256:b6b87b2fb39e6383ca28e2829cddef1d9fc9e27e55ad91ca9c435572cdba51bf"}, - {file = "pandas-1.3.5-cp38-cp38-win_amd64.whl", hash = "sha256:a395692046fd8ce1edb4c6295c35184ae0c2bbe787ecbe384251da609e27edcb"}, - {file = "pandas-1.3.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd971a3f08b745a75a86c00b97f3007c2ea175951286cdda6abe543e687e5f2f"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37f06b59e5bc05711a518aa10beaec10942188dccb48918bb5ae602ccbc9f1a0"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c21778a688d3712d35710501f8001cdbf96eb70a7c587a3d5613573299fdca6"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3345343206546545bc26a05b4602b6a24385b5ec7c75cb6059599e3d56831da2"}, - {file = "pandas-1.3.5-cp39-cp39-win32.whl", hash = "sha256:c69406a2808ba6cf580c2255bcf260b3f214d2664a3a4197d0e640f573b46fd3"}, - {file = "pandas-1.3.5-cp39-cp39-win_amd64.whl", hash = "sha256:32e1a26d5ade11b547721a72f9bfc4bd113396947606e00d5b4a5b79b3dcb006"}, - {file = "pandas-1.3.5.tar.gz", hash = "sha256:1e4285f5de1012de20ca46b188ccf33521bff61ba5c5ebd78b4fb28e5416a9f1"}, -] - -[package.dependencies] -numpy = {version = ">=1.21.0", markers = "python_version >= \"3.10\""} -python-dateutil = ">=2.7.3" -pytz = ">=2017.3" - -[package.extras] -test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"] - -[[package]] -name = "pillow" -version = "9.1.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Pillow-9.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:af79d3fde1fc2e33561166d62e3b63f0cc3e47b5a3a2e5fea40d4917754734ea"}, - {file = "Pillow-9.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:55dd1cf09a1fd7c7b78425967aacae9b0d70125f7d3ab973fadc7b5abc3de652"}, - {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66822d01e82506a19407d1afc104c3fcea3b81d5eb11485e593ad6b8492f995a"}, - {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5eaf3b42df2bcda61c53a742ee2c6e63f777d0e085bbc6b2ab7ed57deb13db7"}, - {file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01ce45deec9df310cbbee11104bae1a2a43308dd9c317f99235b6d3080ddd66e"}, - {file = 
"Pillow-9.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aea7ce61328e15943d7b9eaca87e81f7c62ff90f669116f857262e9da4057ba3"}, - {file = "Pillow-9.1.0-cp310-cp310-win32.whl", hash = "sha256:7a053bd4d65a3294b153bdd7724dce864a1d548416a5ef61f6d03bf149205160"}, - {file = "Pillow-9.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:97bda660702a856c2c9e12ec26fc6d187631ddfd896ff685814ab21ef0597033"}, - {file = "Pillow-9.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21dee8466b42912335151d24c1665fcf44dc2ee47e021d233a40c3ca5adae59c"}, - {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b6d4050b208c8ff886fd3db6690bf04f9a48749d78b41b7a5bf24c236ab0165"}, - {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cfca31ab4c13552a0f354c87fbd7f162a4fafd25e6b521bba93a57fe6a3700a"}, - {file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed742214068efa95e9844c2d9129e209ed63f61baa4d54dbf4cf8b5e2d30ccf2"}, - {file = "Pillow-9.1.0-cp37-cp37m-win32.whl", hash = "sha256:c9efef876c21788366ea1f50ecb39d5d6f65febe25ad1d4c0b8dff98843ac244"}, - {file = "Pillow-9.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:de344bcf6e2463bb25179d74d6e7989e375f906bcec8cb86edb8b12acbc7dfef"}, - {file = "Pillow-9.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:17869489de2fce6c36690a0c721bd3db176194af5f39249c1ac56d0bb0fcc512"}, - {file = "Pillow-9.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:25023a6209a4d7c42154073144608c9a71d3512b648a2f5d4465182cb93d3477"}, - {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8782189c796eff29dbb37dd87afa4ad4d40fc90b2742704f94812851b725964b"}, - {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:463acf531f5d0925ca55904fa668bb3461c3ef6bc779e1d6d8a488092bdee378"}, - {file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f42364485bfdab19c1373b5cd62f7c5ab7cc052e19644862ec8f15bb8af289e"}, - {file = "Pillow-9.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3fddcdb619ba04491e8f771636583a7cc5a5051cd193ff1aa1ee8616d2a692c5"}, - {file = "Pillow-9.1.0-cp38-cp38-win32.whl", hash = "sha256:4fe29a070de394e449fd88ebe1624d1e2d7ddeed4c12e0b31624561b58948d9a"}, - {file = "Pillow-9.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c24f718f9dd73bb2b31a6201e6db5ea4a61fdd1d1c200f43ee585fc6dcd21b34"}, - {file = "Pillow-9.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fb89397013cf302f282f0fc998bb7abf11d49dcff72c8ecb320f76ea6e2c5717"}, - {file = "Pillow-9.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c870193cce4b76713a2b29be5d8327c8ccbe0d4a49bc22968aa1e680930f5581"}, - {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69e5ddc609230d4408277af135c5b5c8fe7a54b2bdb8ad7c5100b86b3aab04c6"}, - {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35be4a9f65441d9982240e6966c1eaa1c654c4e5e931eaf580130409e31804d4"}, - {file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82283af99c1c3a5ba1da44c67296d5aad19f11c535b551a5ae55328a317ce331"}, - {file = "Pillow-9.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a325ac71914c5c043fa50441b36606e64a10cd262de12f7a179620f579752ff8"}, - {file = "Pillow-9.1.0-cp39-cp39-win32.whl", hash = "sha256:a598d8830f6ef5501002ae85c7dbfcd9c27cc4efc02a1989369303ba85573e58"}, - {file = 
"Pillow-9.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0c51cb9edac8a5abd069fd0758ac0a8bfe52c261ee0e330f363548aca6893595"}, - {file = "Pillow-9.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a336a4f74baf67e26f3acc4d61c913e378e931817cd1e2ef4dfb79d3e051b481"}, - {file = "Pillow-9.1.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb1b89b11256b5b6cad5e7593f9061ac4624f7651f7a8eb4dfa37caa1dfaa4d0"}, - {file = "Pillow-9.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:255c9d69754a4c90b0ee484967fc8818c7ff8311c6dddcc43a4340e10cd1636a"}, - {file = "Pillow-9.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5a3ecc026ea0e14d0ad7cd990ea7f48bfcb3eb4271034657dc9d06933c6629a7"}, - {file = "Pillow-9.1.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5b0ff59785d93b3437c3703e3c64c178aabada51dea2a7f2c5eccf1bcf565a3"}, - {file = "Pillow-9.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7110ec1701b0bf8df569a7592a196c9d07c764a0a74f65471ea56816f10e2c8"}, - {file = "Pillow-9.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8d79c6f468215d1a8415aa53d9868a6b40c4682165b8cb62a221b1baa47db458"}, - {file = "Pillow-9.1.0.tar.gz", hash = "sha256:f401ed2bbb155e1ade150ccc63db1a4f6c1909d3d378f7d1235a44e90d75fb97"}, -] - -[package.extras] -docs = ["olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinx-rtd-theme (>=1.0)", "sphinxext-opengraph"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pooch" -version = "1.8.2" -description = "A friend to fetch your data files" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pooch-1.8.2-py3-none-any.whl", hash = "sha256:3529a57096f7198778a5ceefd5ac3ef0e4d06a6ddaf9fc2d609b806f25302c47"}, - {file = "pooch-1.8.2.tar.gz", hash = "sha256:76561f0de68a01da4df6af38e9955c4c9d1a5c90da73f7e40276a5728ec83d10"}, -] - -[package.dependencies] -packaging = ">=20.0" -platformdirs = ">=2.5.0" -requests = ">=2.19.0" - -[package.extras] -progress = ["tqdm (>=4.41.0,<5.0.0)"] -sftp = ["paramiko (>=2.7.0)"] -xxhash = ["xxhash (>=1.4.3)"] - -[[package]] -name = "proglog" -version = "0.1.10" -description = "Log and progress bar manager for console, notebooks, web..." 
-optional = false -python-versions = "*" -files = [ - {file = "proglog-0.1.10-py3-none-any.whl", hash = "sha256:19d5da037e8c813da480b741e3fa71fb1ac0a5b02bf21c41577c7f327485ec50"}, - {file = "proglog-0.1.10.tar.gz", hash = "sha256:658c28c9c82e4caeb2f25f488fff9ceace22f8d69b15d0c1c86d64275e4ddab4"}, -] - -[package.dependencies] -tqdm = "*" - -[[package]] -name = "protobuf" -version = "5.27.2" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, - {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, - {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, - {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, - {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, - {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, - {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, - {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, - {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, -] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydub" -version = "0.25.1" -description = "Manipulate audio with an simple and easy high level interface" -optional = false -python-versions = "*" -files = [ - {file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"}, - {file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"}, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = 
"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-rapidjson" -version = "1.18" -description = "Python wrapper around rapidjson" -optional = false -python-versions = ">=3.6" -files = [ - {file = "python-rapidjson-1.18.tar.gz", hash = "sha256:09a5c362e2fec2a41b53e79e88bd8f0704447cb67c1c89a59e3092ccb4a30355"}, - {file = "python_rapidjson-1.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f38c7ca5fee31423bb34f464c789f57954886dd00e1a8c8483fd13e0c0d2583"}, - {file = "python_rapidjson-1.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1912224817f734ee9138e91d170b62818fd01caa731aa8668e8c9bce9017fe7e"}, - {file = "python_rapidjson-1.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2af6ca181e812f2306d4806beb974334ddd0774a8f62194ad1721277236f4ad1"}, - {file = "python_rapidjson-1.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08f859f64470ecb307cdcd7a532bef9c9ab3c94d2005c5693a7e18b3a11d4b28"}, - {file = "python_rapidjson-1.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:507595740300e95dded254536558cd56733cc3207e3c2457f19231ad00e78d85"}, - {file = "python_rapidjson-1.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5815af2f69a11c114e5004a77b8b036b5abcd06202c8bc1525856f9d836254a3"}, - {file = "python_rapidjson-1.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d680b8c8f4dbceb465544bbdd28463aa7e0b651343aa73c2476533bf300e0266"}, - {file = "python_rapidjson-1.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ff22c4160227be38322a88856f011c95d199103c30993bf3ee64f4bce9221807"}, - {file = "python_rapidjson-1.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:9deb8a8a2df2982b446f2a19264a5da2780ddb415caf9e11d48e74701053f02e"}, - {file = "python_rapidjson-1.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6ecd86abf689538fdab5a55483c38bf10bdd9a8ed204ae10fa5a1bac7222d88"}, - {file = "python_rapidjson-1.18-cp310-cp310-win32.whl", hash = "sha256:a9d4cd0be643b8310c1c92987961c06b68429527154e9bea75118802cd179178"}, - {file = "python_rapidjson-1.18-cp310-cp310-win_amd64.whl", hash = "sha256:52f1d509ec20ab5d26f6dbc5d56821e0b2b1a5a878439eb0b3a33137b59029f5"}, - {file = "python_rapidjson-1.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:83912aae7c508204c263818befa24cf3223ecf0175e70d0412169e1302f1b4f2"}, - {file = "python_rapidjson-1.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0f45a02e4593879772099cf88d18dbde3376334684a809feb9228b8745c0c08c"}, - {file = "python_rapidjson-1.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f201e0c1e41c0e491cf2eca121d51f30c666f35ce33a6d14ba8fc5b76e4a2fa7"}, - {file = "python_rapidjson-1.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:697d06a66a1ba267f5abbb04971e461df1d4528ba341af6848a1ef01ae224e90"}, - {file = "python_rapidjson-1.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7e22b841fda1ec8c9e0a49069fbc6579363ba79fa5398fc7d37666357068cf"}, - {file = "python_rapidjson-1.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:356b2f81e6cdb4c1bb9122b635c8bd827f845da7c0de8618874c933fb88de573"}, 
- {file = "python_rapidjson-1.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:acd2430dd7a8f66618247635c51a9413679e9a5279aaea708f854ef03cc933e1"}, - {file = "python_rapidjson-1.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a587b3ec2b76480dfb57409654a9344ab47910e1b9d09e1c8eefe2db6c8c7364"}, - {file = "python_rapidjson-1.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2cf502e6c01d0933dc65888ab62b86d67967903c9a66158c2e458b312e671345"}, - {file = "python_rapidjson-1.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43e622aa170f0b1e04f5b5ac8c7bf94b99f79efceb3608d8f1456f617cd79cdb"}, - {file = "python_rapidjson-1.18-cp311-cp311-win32.whl", hash = "sha256:f9c9faa7c1df63e2b238fcbdb915d52eba9ba42ee6e2a502f81e8aac07938783"}, - {file = "python_rapidjson-1.18-cp311-cp311-win_amd64.whl", hash = "sha256:e7b1cadf5c8852ae6e0a19fcf5b734eef4f92170292686cfdcced1302ea0aa20"}, - {file = "python_rapidjson-1.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52912323a2ac460ea605ab55f437196f662ec9db82669367dab4cda8f4c05b13"}, - {file = "python_rapidjson-1.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ebbd471d63bfa3c09180fd44eefec7b0f46ca41ee4552559c3a027799c67d781"}, - {file = "python_rapidjson-1.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb89a794242a692ef5d15ec9ad14c21fd17abc4671af62eadc8e6a1861a0319"}, - {file = "python_rapidjson-1.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcae2fdd5a2520dc85f98224ba1fc96badd0b68d3a8ee41485b3e37be67b7bef"}, - {file = "python_rapidjson-1.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f46face2b3e6891dd646dc1062c1133323ce4dc26409a084535f2af9e2bb4e3"}, - {file = "python_rapidjson-1.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67a3f71225200344ffaab3d28add533398b92f65d9166e649222a50677370fd2"}, - {file = "python_rapidjson-1.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7851252083aba29668cf1f02dc1c1e5e5a9113bf4f1dedc2f509c00e43f0c884"}, - {file = "python_rapidjson-1.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:32c32256edb35a234b16dfa6452bdf066cc272675cf9b3eb980e853505202766"}, - {file = "python_rapidjson-1.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5f9d27c090782f83de06dd51b9a7143b04c32314e53ed531a2d8f170f9f255e9"}, - {file = "python_rapidjson-1.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d3e0b8863cc0e78e36d41aae856101291c0bea9215690decafa6bae5f413e1f3"}, - {file = "python_rapidjson-1.18-cp312-cp312-win32.whl", hash = "sha256:123e7bf9726c09055d97ba0c4fc8cdb9deda80c2a9d5409bfd49935a0f38d0b2"}, - {file = "python_rapidjson-1.18-cp312-cp312-win_amd64.whl", hash = "sha256:03d14892a1cdc24e5b200ca619fda397e0f36a3d1420edcb7212ae47d4d9fd3e"}, - {file = "python_rapidjson-1.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1d4861adede630a5eee77c46f9c901da2ac15bc3c0296ad851d69036db3a0374"}, - {file = "python_rapidjson-1.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:35d0e9c8dd61670b5833546b3ded057b68e696ab530d3c14603e718a4bc3db00"}, - {file = "python_rapidjson-1.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d229114f738ee0d9ff1b727aaf7bfe6a90d6f77e0449b33f87ad7814c493c921"}, - {file = "python_rapidjson-1.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb0a8361b668e920d7fa78f725f59d224adedb3620f526509cef4416778e3393"}, - {file = 
"python_rapidjson-1.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20256271a00f758a96ccfdd61434c11a1fc6b5e3fd4e7324dd832e576c9f720b"}, - {file = "python_rapidjson-1.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad82fa706d7437ceb0d8e36870715e8318359bc604016fc505c14ccc109322e9"}, - {file = "python_rapidjson-1.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f13a8be00c0fd31c75304f03df1240d16268720b9d12eca3d055f702dd607427"}, - {file = "python_rapidjson-1.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e9712964a864c7604319bebbdd4ab5de9a42698d3c9a6c15c964a06d586a2c66"}, - {file = "python_rapidjson-1.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0f36f9c194d8c893463128a57bd7cde3bb28151eaf5bb5db5f552de0eb0eb93"}, - {file = "python_rapidjson-1.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4704f9f987a62c4b7e419843bb3c5daea81271dba95cae47e92b2475978ae66b"}, - {file = "python_rapidjson-1.18-cp313-cp313-win32.whl", hash = "sha256:2d197c686a4eacc2defe9bd31bf73b23877ad4974857b72b65e126cef7a50fa5"}, - {file = "python_rapidjson-1.18-cp313-cp313-win_amd64.whl", hash = "sha256:30f4a317af410d3977cf405737a2d6e81c6695d24df33113523023f665bb5e75"}, - {file = "python_rapidjson-1.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:300b8d84d5bebea7988312950fc949c1701055086b2790afaaad68e8f1cf389d"}, - {file = "python_rapidjson-1.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:981dd50522999a4fe519ca14135e20b3acc4928df4d4421d96792913d2fb359d"}, - {file = "python_rapidjson-1.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d03390ac612090c58553e1d8454faff6099a2b2ee0c44ebd19546d5a73b30689"}, - {file = "python_rapidjson-1.18-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0550caca5227e3f929b63b758c19c584f39c10d4e1c4ad9b7e322f19030db3b8"}, - {file = "python_rapidjson-1.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37295c26b6270515666243d499c060006471b0517dbdf7690b5f855b9531f9b8"}, - {file = "python_rapidjson-1.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d058b9c740c55fe3ffab826742773f995620992eda6a31d794727526d0ea1610"}, - {file = "python_rapidjson-1.18-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0c4697e3fa587c7f3938d2394ff6563085bbf346e4cab29fb425595d267a59d1"}, - {file = "python_rapidjson-1.18-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aa8fbc9c31d9320e80a290d3cf847756d37290628ccaad3719de6fa51ab43597"}, - {file = "python_rapidjson-1.18-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:191e051b7b384474b6558902b8c33f82474492e3d19cc188224cd1a5584ca4bf"}, - {file = "python_rapidjson-1.18-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dd0bc1b3d4d72bd3eb9f60f84473fcefb316912422267bf06d8c2290ef33e02"}, - {file = "python_rapidjson-1.18-cp38-cp38-win32.whl", hash = "sha256:1925a3ed72504812ab1d8edd59ad83bd4b96b5a3e149ee927f3cdb98b803ac22"}, - {file = "python_rapidjson-1.18-cp38-cp38-win_amd64.whl", hash = "sha256:4e21cbd8585598ce091990196fe6fe354c607e13e2b17794f3711a8f2b2b8b11"}, - {file = "python_rapidjson-1.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:68230f34a076a54298d5c860ae8aa08e3de5ab5a289b23b96a0a6039861f911b"}, - {file = "python_rapidjson-1.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b1ec8b167484523bc0d753998594cb2614061755191946b73c7e88e124287595"}, - {file = 
"python_rapidjson-1.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bad5d1a46b2d07f1d9b4ad1c316a36e024da451ff876d1572cb345c6bb50a42"}, - {file = "python_rapidjson-1.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daf270f1d2feddf7680ddc2faf2778e814caf569095cc60c2079e856af3d2bc3"}, - {file = "python_rapidjson-1.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72948a56b9d4964d72f2f3862d5d168b247457f9d1e70cee750a0cd660f67555"}, - {file = "python_rapidjson-1.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0624eebe2ceba44dd84a3d3409fab1e7e1a021c3701b5ad5bd8a0fba47898d20"}, - {file = "python_rapidjson-1.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b975fcecbf5f3845ce72040be4630ece4c5b467c24c749be2a81827918a2e530"}, - {file = "python_rapidjson-1.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f725f560f4865fb5b684a26935f78690e44aa475c8b41a793d096a122115c9b3"}, - {file = "python_rapidjson-1.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0a31ea1a7a11a6e60fed42364e6726d29346f6ba1a9212ea1b6753731f600909"}, - {file = "python_rapidjson-1.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:477aff79a2d87daee45c49e917097426fe5495f99fb935a5adb20716cb52c86a"}, - {file = "python_rapidjson-1.18-cp39-cp39-win32.whl", hash = "sha256:d13a0e3f647726f653cd3d6bfc770d595f51d75212b38df82d2a465bc0df5dd8"}, - {file = "python_rapidjson-1.18-cp39-cp39-win_amd64.whl", hash = "sha256:412c716cbf41ecfb99879443fc11288513053e63302232df0ed99d629fd220da"}, -] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywavelets" -version = "1.6.0" -description = "PyWavelets, wavelet transform module" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pywavelets-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ddc1ff5ad706313d930f857f9656f565dfb81b85bbe58a9db16ad8fa7d1537c5"}, - {file = "pywavelets-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:78feab4e0c25fa32034b6b64cb854c6ce15663b4f0ffb25d8f0ee58915300f9b"}, - {file = "pywavelets-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be36f08efe9bc3abf40cf40cd2ee0aa0db26e4894e13ce5ac178442864161e8c"}, - {file = "pywavelets-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0595c51472c9c5724fe087cb73e2797053fd25c788d6553fdad6ff61abc60e91"}, - {file = "pywavelets-1.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:058a750477dde633ac53b8806f835af3559d52db6532fb2b93c1f4b5441365b8"}, - {file = "pywavelets-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:538795d9c4181152b414285b5a7f72ac52581ecdcdce74b6cca3fa0b8a5ab0aa"}, - {file = "pywavelets-1.6.0-cp310-cp310-win32.whl", hash = "sha256:47de024ba4f9df97e98b5f540340e1a9edd82d2c477450bef8c9b5381487128e"}, - {file = "pywavelets-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e2c44760c0906ddf2176920a2613287f6eea947f166ce7eee9546081b06a6835"}, - {file = "pywavelets-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:d91aaaf6de53b758bcdc96c81cdb5a8607758602be49f691188c0e108cf1e738"}, - {file = "pywavelets-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b5302edb6d1d1ff6636d37c9ff29c4892f2a3648d736cc1df01f3f36e25c8cf"}, - {file = "pywavelets-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e655446e37a3c87213d5c6386b86f65c4d61736b4432d720171e7dd6523d6a"}, - {file = "pywavelets-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ec7d69b746a0eaa327b829a3252a63619f2345e263177be5dd9bf30d7933c8d"}, - {file = "pywavelets-1.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97ea9613bd6b7108ebb44b709060adc7e2d5fac73be7152342bdd5513d75f84e"}, - {file = "pywavelets-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:48b3813c6d1a7a8194f37dbb5dbbdf2fe1112152c91445ea2e54f64ff6350c36"}, - {file = "pywavelets-1.6.0-cp311-cp311-win32.whl", hash = "sha256:4ffb484d096a5eb10af7121e0203546a03e1369328df321a33ef91f67bac40cf"}, - {file = "pywavelets-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:274bc47b289585383aa65519b3fcae5b4dee5e31db3d4198d4fad701a70e59f7"}, - {file = "pywavelets-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6ec113386a432e04103f95e351d2657b42145bd1e1ed26513423391bcb5f011"}, - {file = "pywavelets-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab652112d3932d21f020e281e06926a751354c2b5629fb716f5eb9d0104b84e5"}, - {file = "pywavelets-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47b0314a22616c5f3f08760f0e00b4a15b7c7dadca5e39bb701cf7869a4207c5"}, - {file = "pywavelets-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138471513bc0a4cd2ddc4e50c7ec04e3468c268e101a0d02f698f6aedd1d5e79"}, - {file = "pywavelets-1.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67936491ae3e5f957c428e34fdaed21f131535b8d60c7c729a1b539ce8864837"}, - {file = "pywavelets-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd798cee3d28fb3d32a26a00d9831a20bf316c36d685e4ced01b4e4a8f36f5ce"}, - {file = "pywavelets-1.6.0-cp312-cp312-win32.whl", hash = "sha256:e772f7f0c16bfc3be8ac3cd10d29a9920bb7a39781358856223c491b899e6e79"}, - {file = "pywavelets-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:4ef15a63a72afa67ae9f4f3b06c95c5382730fb3075e668d49a880e65f2f089c"}, - {file = "pywavelets-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:627df378e63e9c789b6f2e7060cb4264ebae6f6b0efc1da287a2c060de454a1f"}, - {file = "pywavelets-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a413b51dc19e05243fe0b0864a8e8a16b5ca9bf2e4713da00a95b1b5747a5367"}, - {file = "pywavelets-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be615c6c1873e189c265d4a76d1751ec49b17e29725e6dd2e9c74f1868f590b7"}, - {file = "pywavelets-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4021ef69ec9f3862f66580fc4417be728bd78722914394594b48212fd1fcaf21"}, - {file = "pywavelets-1.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8fbf7b61b28b5457693c034e58a01622756d1fd60a80ae13ac5888b1d3e57e80"}, - {file = "pywavelets-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f58ddbb0a6cd243928876edfc463b990763a24fb94498607d6fea690e32cca4c"}, - {file = "pywavelets-1.6.0-cp39-cp39-win32.whl", hash = "sha256:42a22e68e345b6de7d387ef752111ab4530c98048d2b4bdac8ceefb078b4ead6"}, - {file = "pywavelets-1.6.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:32198de321892743c1a3d1957fe1cd8a8ecc078bfbba6b8f3982518e897271d7"}, - {file = "pywavelets-1.6.0.tar.gz", hash = "sha256:ea027c70977122c5fc27b2510f0a0d9528f9c3df6ea3e4c577ca55fd00325a5b"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<3" - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "pyzmq" -version = "23.2.1" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyzmq-23.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:a3fd44b5046d247e7f0f1660bcafe7b5fb0db55d0934c05dd57dda9e1f823ce7"}, - {file = "pyzmq-23.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2141e6798d5981be04c08996d27962086a1aa3ea536fe9cf7e89817fd4523f86"}, - {file = "pyzmq-23.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a39ddb0431a68954bd318b923230fa5b649c9c62b0e8340388820c5f1b15bd2"}, - {file = "pyzmq-23.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e06747014a5ad1b28cebf5bc1ddcdaccfb44e9b441d35e6feb1286c8a72e54be"}, - {file = "pyzmq-23.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0113d70b095339e99bb522fe7294f5ae6a7f3b2b8f52f659469a74b5cc7661"}, - {file = "pyzmq-23.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:71b32a1e827bdcbf73750e60370d3b07685816ff3d8695f450f0f8c3226503f8"}, - {file = "pyzmq-23.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:55568a020ad2cae9ae36da6058e7ca332a56df968f601cbdb7cf6efb2a77579a"}, - {file = "pyzmq-23.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c02a0cd39dc01659b3d6cb70bb3a41aebd9885fd78239acdd8d9c91351c4568"}, - {file = "pyzmq-23.2.1-cp310-cp310-win32.whl", hash = "sha256:e1fe30bcd5aea5948c42685fad910cd285eacb2518ea4dc6c170d6b535bee95d"}, - {file = "pyzmq-23.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:650389bbfca73955b262b2230423d89992f38ec48033307ae80e700eaa2fbb63"}, - {file = "pyzmq-23.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e753eee6d3b93c5354e8ba0a1d62956ee49355f0a36e00570823ef64e66183f5"}, - {file = "pyzmq-23.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f07016e3cf088dbfc6e7c5a7b3f540db5c23b0190d539e4fd3e2b5e6beffa4b5"}, - {file = "pyzmq-23.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4805af9614b0b41b7e57d17673459facf85604dac502a5a9244f6e8c9a4de658"}, - {file = "pyzmq-23.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39dd252b683816935702825e5bf775df16090619ced9bb4ba68c2d0b6f0c9b18"}, - {file = "pyzmq-23.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:84678153432241bcdca2210cf4ff83560b200556867aea913ffbb960f5d5f340"}, - {file = "pyzmq-23.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:90d88f9d9a2ae6cfb1dc4ea2d1710cdf6456bc1b9a06dd1bb485c5d298f2517e"}, - {file = "pyzmq-23.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:794871988c34727c7f79bdfe2546e6854ae1fa2e1feb382784f23a9c6c63ecb3"}, - {file = "pyzmq-23.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c56b1a62a1fb87565343c57b6743fd5da6e138b8c6562361d7d9b5ce4acf399a"}, - {file = "pyzmq-23.2.1-cp311-cp311-win32.whl", hash = "sha256:c3ebf1668664d20c8f7d468955f18379b7d1f7bc8946b13243d050fa3888c7ff"}, - {file = "pyzmq-23.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:ec9803aca9491fd6f0d853d2a6147f19f8deaaa23b1b713d05c5d09e56ea7142"}, - {file = "pyzmq-23.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:385609812eafd9970c3752c51f2f6c4f224807e3e441bcfd8c8273877d00c8a8"}, - {file = "pyzmq-23.2.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b861db65f6b8906c8d6db51dde2448f266f0c66bf28db2c37aea50f58a849859"}, - {file = "pyzmq-23.2.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b1e79bba24f6df1712e3188d5c32c480d8eda03e8ecff44dc8ecb0805fa62f3"}, - {file = "pyzmq-23.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8dc66f109a245653b19df0f44a5af7a3f14cb8ad6c780ead506158a057bd36ce"}, - {file = "pyzmq-23.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b815991c7d024bf461f358ad871f2be1135576274caed5749c4828859e40354e"}, - {file = "pyzmq-23.2.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:29b74774a0bfd3c4d98ac853f0bdca55bd9ec89d5b0def5486407cca54472ef8"}, - {file = "pyzmq-23.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4bb798bef181648827019001f6be43e1c48b34b477763b37a8d27d8c06d197b8"}, - {file = "pyzmq-23.2.1-cp36-cp36m-win32.whl", hash = "sha256:565bd5ab81f6964fc4067ccf2e00877ad0fa917308975694bbb54378389215f8"}, - {file = "pyzmq-23.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:1f368a82b29f80071781b20663c0fc0c8f6b13273f9f5abe1526af939534f90f"}, - {file = "pyzmq-23.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c9cfaf530e6a7ff65f0afe275e99f983f68b54dfb23ea401f0bc297a632766b6"}, - {file = "pyzmq-23.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c558b50402fca1acc94329c5d8f12aa429738904a5cfb32b9ed3c61235221bb"}, - {file = "pyzmq-23.2.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20bafc4095eab00f41a510579363a3f5e1f5c69d7ee10f1d88895c4df0259183"}, - {file = "pyzmq-23.2.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f619fd38fc2641abfb53cca719c165182500600b82c695cc548a0f05f764be05"}, - {file = "pyzmq-23.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:044447ae4b2016a6b8697571fd633f799f860b19b76c4a2fd9b1140d52ee6745"}, - {file = "pyzmq-23.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:49d30ba7074f469e8167917abf9eb854c6503ae10153034a6d4df33618f1db5f"}, - {file = "pyzmq-23.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:48400b96788cdaca647021bf19a9cd668384f46e4d9c55cf045bdd17f65299c8"}, - {file = "pyzmq-23.2.1-cp37-cp37m-win32.whl", hash = "sha256:8a68f57b7a3f7b6b52ada79876be1efb97c8c0952423436e84d70cc139f16f0d"}, - {file = "pyzmq-23.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9e5bf6e7239fc9687239de7a283aa8b801ab85371116045b33ae20132a1325d6"}, - {file = "pyzmq-23.2.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:0ff6294e001129a9f22dcbfba186165c7e6f573c46de2704d76f873c94c65416"}, - {file = "pyzmq-23.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:ffc6b1623d0f9affb351db4ca61f432dca3628a5ee015f9bf2bfbe9c6836881c"}, - {file = "pyzmq-23.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4d6f110c56f7d5b4d64dde3a382ae61b6d48174e30742859d8e971b18b6c9e5c"}, - {file = "pyzmq-23.2.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9269fbfe3a4eb2009199120861c4571ef1655fdf6951c3e7f233567c94e8c602"}, - {file = "pyzmq-23.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12e62ff0d5223ec09b597ab6d73858b9f64a51221399f3cb08aa495e1dff7935"}, - {file = "pyzmq-23.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fd5d0d50cbcf4bc376861529a907bed026a4cbe8c22a500ff8243231ef02433"}, - {file = "pyzmq-23.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9d0ab2936085c85a1fc6f9fd8f89d5235ae99b051e90ec5baa5e73ad44346e1f"}, - {file = "pyzmq-23.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:022cf5ea7bcaa8a06a03c2706e0ae66904b6138b2155577cd34c64bc7cc637ab"}, - {file = "pyzmq-23.2.1-cp38-cp38-win32.whl", hash = "sha256:28dbdb90b2f6b131f8f10e6081012e4e25234213433420e67e0c1162de537113"}, - {file = "pyzmq-23.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:10d1910ec381b851aeb024a042a13db178cb1edf125e76a4e9d2548ad103aadb"}, - {file = "pyzmq-23.2.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:99a5a77a10863493a1ee8dece02578c6b32025fb3afff91b40476bc489e81648"}, - {file = "pyzmq-23.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aecd6ceaccc4b594e0092d6513ef3f1c0fa678dd89f86bb8ff1a47014b8fca35"}, - {file = "pyzmq-23.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:415ff62ac525d9add1e3550430a09b9928d2d24a20cc4ce809e67caac41219ab"}, - {file = "pyzmq-23.2.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67975a9e1237b9ccc78f457bef17691bbdd2055a9d26e81ee914ba376846d0ce"}, - {file = "pyzmq-23.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e106b64bad744fe469dc3dd864f2764d66399178c1bf39d45294cc7980f14f"}, - {file = "pyzmq-23.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c842109d31a9281d678f668629241c405928afbebd913c48a5a8e7aee61f63d"}, - {file = "pyzmq-23.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fefdf9b685fda4141b95ebec975946076a5e0723ff70b037032b2085c5317684"}, - {file = "pyzmq-23.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:79a87831b47a9f6161ad23fa5e89d5469dc585abc49f90b9b07fea8905ae1234"}, - {file = "pyzmq-23.2.1-cp39-cp39-win32.whl", hash = "sha256:342ca3077f47ec2ee41b9825142b614e03e026347167cbc72a59b618c4f6106c"}, - {file = "pyzmq-23.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:5e05492be125dce279721d6b54fd1b956546ecc4bcdfcf8e7b4c413bc0874c10"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:07ed8aaf7ffe150af873269690cc654ffeca7491f62aae0f3821baa181f8d5fe"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ad28ddb40db8e450d7d4bf8a1d765d3f87b63b10e7e9a825a3c130c6371a8c03"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2f67b63f53c6994d601404fd1a329e6d940ac3dd1d92946a93b2b9c70df67b9f"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c890309296f53f9aa32ffcfc51d805705e1982bffd27c9692a8f1e1b8de279f4"}, - {file = "pyzmq-23.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:624fd38071a817644acdae075b92a23ea0bdd126a58148288e8284d23ec361ce"}, - 
{file = "pyzmq-23.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a114992a193577cb62233abf8cb2832970f9975805a64740e325d2f895e7f85a"}, - {file = "pyzmq-23.2.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c780acddd2934c6831ff832ecbf78a45a7b62d4eb216480f863854a8b7d54fa7"}, - {file = "pyzmq-23.2.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d904f6595acfaaf99a1a61881fea068500c40374d263e5e073aa4005e5f9c28a"}, - {file = "pyzmq-23.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:929d548b74c0f82f7f95b54e4a43f9e4ce2523cfb8a54d3f7141e45652304b2a"}, - {file = "pyzmq-23.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f392cbea531b7142d1958c0d4a0c9c8d760dc451e5848d8dd3387804d3e3e62c"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0f09d85c45f58aa8e715b42f8b26beba68b3b63a8f7049113478aca26efbc30"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e708fbfdf4ee3107422b69ca65da1b9f056b431fc0888096a8c1d6cd908e8f"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35e635343ff367f697d00fa1484262bb68e36bc74c9b80737eac5a1e04c4e1b1"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efb9e38b2a590282704269585de7eb33bf43dc294cad092e1b172e23d4c217e5"}, - {file = "pyzmq-23.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:407f909c4e8fde62fbdad9ebd448319792258cc0550c2815567a4d9d8d9e6d18"}, - {file = "pyzmq-23.2.1.tar.gz", hash = "sha256:2b381aa867ece7d0a82f30a0c7f3d4387b7cf2e0697e33efaa5bed6c5784abcd"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} -py = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "regex" -version = "2024.5.15" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, - {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, - {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, - {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, - {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = 
"regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, - {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, - {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, - {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, - {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "resampy" -version = "0.4.3" -description = "Efficient signal resampling" -optional = false -python-versions = "*" -files = [ - {file = "resampy-0.4.3-py3-none-any.whl", hash = "sha256:ad2ed64516b140a122d96704e32bc0f92b23f45419e8b8f478e5a05f83edcebd"}, - {file = "resampy-0.4.3.tar.gz", hash = "sha256:a0d1c28398f0e55994b739650afef4e3974115edbe96cd4bb81968425e916e47"}, -] - -[package.dependencies] -numba = ">=0.53" -numpy = ">=1.17" - -[package.extras] -design = ["optuna (>=2.10.0)"] -docs = ["numpydoc", "sphinx (!=1.3.1)"] -tests = ["pytest (<8)", "pytest-cov", "scipy (>=1.1)"] - -[[package]] -name = "scikit-image" -version = "0.20.0" -description = "Image processing in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "scikit_image-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3cec8c5e8412ee19642a916648144186eb6b60c39fb6608ab478b4d1a4575e25"}, - {file = "scikit_image-0.20.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0ab378822fadc93db7e917a266d489ea33df3b42edfef197caaebbabbc2e4ecc"}, - {file = "scikit_image-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6797e3ef5fc53897bde131cfc3ceba6ce247d89cfe194fc8d3aba7f5c12aaf6"}, - {file = "scikit_image-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f667dcf01737248bc5bd0a99fad58475abeb6b6a8229aecee9fdb96cf988ae85"}, - {file = "scikit_image-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:79a400ffe35fc7f64d1d043f3d043e062015689ad5637c35cd5569edae87ae13"}, - {file = "scikit_image-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:049d955869620453b9e0568c2da62c8fec47bf3714be48b5d46bbaebb91bdc1f"}, - {file = "scikit_image-0.20.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:a503ee85b444234ee88f34bf8674872dc37c6124ff60b7eb9242813de012ff4e"}, - {file = "scikit_image-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3943d7355d02b40c066fd87cd5fe1b4f6637a16448e62333c4191a65ebf40a1c"}, - {file = "scikit_image-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d719242ea7e7250d49e38d1e33c44c2dd59c3414ae085881d168b98cbb6059a"}, - {file = "scikit_image-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:fdd1fd258e78c86e382fd687177431088a40880bd785e0ab40ee5f3794366710"}, - {file = "scikit_image-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1cd0486cb769d906307a3ec3884630be822d8ec2f41069e197336f904f584a33"}, - {file = "scikit_image-0.20.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:2e9026161d0a698f532352dda6455a0bc13b1c9d831ea9279726b59d064df574"}, - {file = "scikit_image-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c123e6b0677dc1697c04b5bf2efb7110bcca511b4bc6967a38fa395ae5edf44"}, - {file = "scikit_image-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76f2fd12b537daea806a078df9ea76f5cc5a529d5bd7c41d7d0a101e9c5f91c4"}, - {file = 
"scikit_image-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:2118d610096754bca44b5d37328e1382e5fa7c6493803685100c9238e257d848"}, - {file = "scikit_image-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:13a5c1c81ee5bcb64ee8ca8f1a2cf371b0c4345ea6fb67c3052e1c6d5edbd936"}, - {file = "scikit_image-0.20.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:1794889d2dbb385c7ad5656363371ba0057b7a3335cda093a11415af84bb96e2"}, - {file = "scikit_image-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df14f8a55dae511749b081d9402ea215ea7c641bd6f74f06aa7b623e132817df"}, - {file = "scikit_image-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b856efc75e3051bea6d40a8ffcdaabd5682783ece1aa91c3f6777c3372a98ca1"}, - {file = "scikit_image-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:a600374394b76b7fc260cef54e1be21047c4de0ecffb0b7f2f7392cd8ba16ffa"}, - {file = "scikit_image-0.20.0.tar.gz", hash = "sha256:2cd784fce18bd31d71ade62c6221440199ead03acf7544086261ee032264cf61"}, -] - -[package.dependencies] -imageio = ">=2.4.1" -lazy_loader = ">=0.1" -networkx = ">=2.8" -numpy = ">=1.21.1" -packaging = ">=20.0" -pillow = ">=9.0.1" -PyWavelets = ">=1.1.1" -scipy = {version = ">=1.8", markers = "python_version > \"3.9\""} -tifffile = ">=2019.7.26" - -[package.extras] -build = ["Cython (>=0.29.24)", "build", "meson-python (>=0.13.0rc0)", "ninja", "numpy (>=1.21.1)", "packaging (>=20)", "pythran", "setuptools (>=67)", "wheel"] -data = ["pooch (>=1.3.0)"] -default = ["PyWavelets (>=1.1.1)", "imageio (>=2.4.1)", "lazy_loader (>=0.1)", "networkx (>=2.8)", "numpy (>=1.21.1)", "packaging (>=20.0)", "pillow (>=9.0.1)", "scipy (>=1.8)", "scipy (>=1.8,<1.9.2)", "tifffile (>=2019.7.26)"] -developer = ["pre-commit", "rtoml"] -docs = ["dask[array] (>=2022.9.2)", "ipywidgets", "kaleido", "matplotlib (>=3.6)", "myst-parser", "numpydoc (>=1.5)", "pandas (>=1.5)", "plotly (>=5.10)", "pooch (>=1.6)", "pytest-runner", "scikit-learn", "seaborn (>=0.11)", "sphinx (>=5.2)", "sphinx-copybutton", "sphinx-gallery (>=0.11)", "tifffile (>=2022.8.12)"] -optional = ["SimpleITK", "astropy (>=3.1.2)", "cloudpickle (>=0.2.1)", "dask[array] (>=1.0.0,!=2.17.0)", "matplotlib (>=3.3)", "pooch (>=1.3.0)", "pyamg"] -test = ["asv", "codecov", "matplotlib (>=3.3)", "pooch (>=1.3.0)", "pytest (>=5.2.0)", "pytest-cov (>=2.7.0)", "pytest-faulthandler", "pytest-localserver"] - -[[package]] -name = "scikit-learn" -version = "1.5.1" -description = "A set of python modules for machine learning and data mining" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scikit_learn-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:781586c414f8cc58e71da4f3d7af311e0505a683e112f2f62919e3019abd3745"}, - {file = "scikit_learn-1.5.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5b213bc29cc30a89a3130393b0e39c847a15d769d6e59539cd86b75d276b1a7"}, - {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff4ba34c2abff5ec59c803ed1d97d61b036f659a17f55be102679e88f926fac"}, - {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:161808750c267b77b4a9603cf9c93579c7a74ba8486b1336034c2f1579546d21"}, - {file = "scikit_learn-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:10e49170691514a94bb2e03787aa921b82dbc507a4ea1f20fd95557862c98dc1"}, - {file = "scikit_learn-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:154297ee43c0b83af12464adeab378dee2d0a700ccd03979e2b821e7dd7cc1c2"}, - {file 
= "scikit_learn-1.5.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b5e865e9bd59396220de49cb4a57b17016256637c61b4c5cc81aaf16bc123bbe"}, - {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909144d50f367a513cee6090873ae582dba019cb3fca063b38054fa42704c3a4"}, - {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b6f74b2c880276e365fe84fe4f1befd6a774f016339c65655eaff12e10cbf"}, - {file = "scikit_learn-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:9a07f90846313a7639af6a019d849ff72baadfa4c74c778821ae0fad07b7275b"}, - {file = "scikit_learn-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5944ce1faada31c55fb2ba20a5346b88e36811aab504ccafb9f0339e9f780395"}, - {file = "scikit_learn-1.5.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0828673c5b520e879f2af6a9e99eee0eefea69a2188be1ca68a6121b809055c1"}, - {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508907e5f81390e16d754e8815f7497e52139162fd69c4fdbd2dfa5d6cc88915"}, - {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97625f217c5c0c5d0505fa2af28ae424bd37949bb2f16ace3ff5f2f81fb4498b"}, - {file = "scikit_learn-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:da3f404e9e284d2b0a157e1b56b6566a34eb2798205cba35a211df3296ab7a74"}, - {file = "scikit_learn-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88e0672c7ac21eb149d409c74cc29f1d611d5158175846e7a9c2427bd12b3956"}, - {file = "scikit_learn-1.5.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:7b073a27797a283187a4ef4ee149959defc350b46cbf63a84d8514fe16b69855"}, - {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b59e3e62d2be870e5c74af4e793293753565c7383ae82943b83383fdcf5cc5c1"}, - {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd8d3a19d4bd6dc5a7d4f358c8c3a60934dc058f363c34c0ac1e9e12a31421d"}, - {file = "scikit_learn-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f57428de0c900a98389c4a433d4a3cf89de979b3aa24d1c1d251802aa15e44d"}, - {file = "scikit_learn-1.5.1.tar.gz", hash = "sha256:0ea5d40c0e3951df445721927448755d3fe1d80833b0b7308ebff5d2a45e6414"}, -] - -[package.dependencies] -joblib = ">=1.2.0" -numpy = ">=1.19.5" -scipy = ">=1.6.0" -threadpoolctl = ">=3.1.0" - -[package.extras] -benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] -build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] -examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] -install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] -maintenance = ["conda-lock (==2.5.6)"] -tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", 
"polars (>=0.20.23)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] - -[[package]] -name = "scipy" -version = "1.12.0" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scipy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78e4402e140879387187f7f25d91cc592b3501a2e51dfb320f48dfb73565f10b"}, - {file = "scipy-1.12.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5f00ebaf8de24d14b8449981a2842d404152774c1a1d880c901bf454cb8e2a1"}, - {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e53958531a7c695ff66c2e7bb7b79560ffdc562e2051644c5576c39ff8efb563"}, - {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e32847e08da8d895ce09d108a494d9eb78974cf6de23063f93306a3e419960c"}, - {file = "scipy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c1020cad92772bf44b8e4cdabc1df5d87376cb219742549ef69fc9fd86282dd"}, - {file = "scipy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:75ea2a144096b5e39402e2ff53a36fecfd3b960d786b7efd3c180e29c39e53f2"}, - {file = "scipy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:408c68423f9de16cb9e602528be4ce0d6312b05001f3de61fe9ec8b1263cad08"}, - {file = "scipy-1.12.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5adfad5dbf0163397beb4aca679187d24aec085343755fcdbdeb32b3679f254c"}, - {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3003652496f6e7c387b1cf63f4bb720951cfa18907e998ea551e6de51a04467"}, - {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8066bce124ee5531d12a74b617d9ac0ea59245246410e19bca549656d9a40a"}, - {file = "scipy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8bee4993817e204d761dba10dbab0774ba5a8612e57e81319ea04d84945375ba"}, - {file = "scipy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a24024d45ce9a675c1fb8494e8e5244efea1c7a09c60beb1eeb80373d0fecc70"}, - {file = "scipy-1.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7e76cc48638228212c747ada851ef355c2bb5e7f939e10952bc504c11f4e372"}, - {file = "scipy-1.12.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7ce148dffcd64ade37b2df9315541f9adad6efcaa86866ee7dd5db0c8f041c3"}, - {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c39f92041f490422924dfdb782527a4abddf4707616e07b021de33467f917bc"}, - {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ebda398f86e56178c2fa94cad15bf457a218a54a35c2a7b4490b9f9cb2676c"}, - {file = "scipy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:95e5c750d55cf518c398a8240571b0e0782c2d5a703250872f36eaf737751338"}, - {file = "scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c"}, - {file = "scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35"}, - {file = "scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067"}, - {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371"}, - {file = 
"scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490"}, - {file = "scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc"}, - {file = "scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e"}, - {file = "scipy-1.12.0.tar.gz", hash = "sha256:4bf5abab8a36d20193c698b0f1fc282c1d083c94723902c447e5d2f1780936a3"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<1.29.0" - -[package.extras] -dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] -doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "setuptools" -version = "70.3.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, - {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "sh" -version = "1.14.3" -description = "Python subprocess replacement" -optional = false -python-versions = "*" -files = [ - {file = "sh-1.14.3.tar.gz", hash = "sha256:e4045b6c732d9ce75d571c79f5ac2234edd9ae4f5fa9d59b09705082bdca18c7"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "soundfile" -version = "0.12.1" -description = "An audio library based on libsndfile, CFFI and NumPy" -optional = false -python-versions = "*" -files = [ - {file = "soundfile-0.12.1-py2.py3-none-any.whl", hash = "sha256:828a79c2e75abab5359f780c81dccd4953c45a2c4cd4f05ba3e233ddf984b882"}, - {file = "soundfile-0.12.1-py2.py3-none-macosx_10_9_x86_64.whl", hash = 
"sha256:d922be1563ce17a69582a352a86f28ed8c9f6a8bc951df63476ffc310c064bfa"}, - {file = "soundfile-0.12.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:bceaab5c4febb11ea0554566784bcf4bc2e3977b53946dda2b12804b4fe524a8"}, - {file = "soundfile-0.12.1-py2.py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:2dc3685bed7187c072a46ab4ffddd38cef7de9ae5eb05c03df2ad569cf4dacbc"}, - {file = "soundfile-0.12.1-py2.py3-none-manylinux_2_31_x86_64.whl", hash = "sha256:074247b771a181859d2bc1f98b5ebf6d5153d2c397b86ee9e29ba602a8dfe2a6"}, - {file = "soundfile-0.12.1-py2.py3-none-win32.whl", hash = "sha256:59dfd88c79b48f441bbf6994142a19ab1de3b9bb7c12863402c2bc621e49091a"}, - {file = "soundfile-0.12.1-py2.py3-none-win_amd64.whl", hash = "sha256:0d86924c00b62552b650ddd28af426e3ff2d4dc2e9047dae5b3d8452e0a49a77"}, - {file = "soundfile-0.12.1.tar.gz", hash = "sha256:e8e1017b2cf1dda767aef19d2fd9ee5ebe07e050d430f77a0a7c66ba08b8cdae"}, -] - -[package.dependencies] -cffi = ">=1.0" - -[package.extras] -numpy = ["numpy"] - -[[package]] -name = "sympy" -version = "1.13.0" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sympy-1.13.0-py3-none-any.whl", hash = "sha256:6b0b32a4673fb91bd3cac3b55406c8e01d53ae22780be467301cc452f6680c92"}, - {file = "sympy-1.13.0.tar.gz", hash = "sha256:3b6af8f4d008b9a1a6a4268b335b984b23835f26d1d60b0526ebc71d48a25f57"}, -] - -[package.dependencies] -mpmath = ">=1.1.0,<1.4" - -[package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] - -[[package]] -name = "tbb" -version = "2021.13.0" -description = "Intel® oneAPI Threading Building Blocks (oneTBB)" -optional = false -python-versions = "*" -files = [ - {file = "tbb-2021.13.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:a2567725329639519d46d92a2634cf61e76601dac2f777a05686fea546c4fe4f"}, - {file = "tbb-2021.13.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:aaf667e92849adb012b8874d6393282afc318aca4407fc62f912ee30a22da46a"}, - {file = "tbb-2021.13.0-py3-none-win32.whl", hash = "sha256:6669d26703e9943f6164c6407bd4a237a45007e79b8d3832fe6999576eaaa9ef"}, - {file = "tbb-2021.13.0-py3-none-win_amd64.whl", hash = "sha256:3528a53e4bbe64b07a6112b4c5a00ff3c61924ee46c9c68e004a1ac7ad1f09c3"}, -] - -[[package]] -name = "threadpoolctl" -version = "3.5.0" -description = "threadpoolctl" -optional = false -python-versions = ">=3.8" -files = [ - {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, - {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, -] - -[[package]] -name = "tifffile" -version = "2024.7.2" -description = "Read and write TIFF files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "tifffile-2024.7.2-py3-none-any.whl", hash = "sha256:5a2ee608c9cc1f2e044d943dacebddc71d4827b6fad150ef4c644b7aefbe2d1a"}, - {file = "tifffile-2024.7.2.tar.gz", hash = "sha256:02e52e8872c0e9943add686d2fd8bcfb18f0a824760882cf5e35fcbc2c80e32c"}, -] - -[package.dependencies] -numpy = "*" - -[package.extras] -all = ["defusedxml", "fsspec", "imagecodecs (>=2023.8.12)", "lxml", "matplotlib", "zarr"] - -[[package]] -name = "tokenizers" -version = "0.13.3" -description = "Fast and Customizable Tokenizers" -optional = false -python-versions = "*" -files = [ - {file = "tokenizers-0.13.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:f3835c5be51de8c0a092058a4d4380cb9244fb34681fd0a295fbf0a52a5fdf33"}, - {file = 
"tokenizers-0.13.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4ef4c3e821730f2692489e926b184321e887f34fb8a6b80b8096b966ba663d07"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5fd1a6a25353e9aa762e2aae5a1e63883cad9f4e997c447ec39d071020459bc"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee0b1b311d65beab83d7a41c56a1e46ab732a9eed4460648e8eb0bd69fc2d059"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ef4215284df1277dadbcc5e17d4882bda19f770d02348e73523f7e7d8b8d396"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4d53976079cff8a033f778fb9adca2d9d69d009c02fa2d71a878b5f3963ed30"}, - {file = "tokenizers-0.13.3-cp310-cp310-win32.whl", hash = "sha256:1f0e3b4c2ea2cd13238ce43548959c118069db7579e5d40ec270ad77da5833ce"}, - {file = "tokenizers-0.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:89649c00d0d7211e8186f7a75dfa1db6996f65edce4b84821817eadcc2d3c79e"}, - {file = "tokenizers-0.13.3-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:56b726e0d2bbc9243872b0144515ba684af5b8d8cd112fb83ee1365e26ec74c8"}, - {file = "tokenizers-0.13.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc5c022ce692e1f499d745af293ab9ee6f5d92538ed2faf73f9708c89ee59ce6"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55c981ac44ba87c93e847c333e58c12abcbb377a0c2f2ef96e1a266e4184ff2"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f247eae99800ef821a91f47c5280e9e9afaeed9980fc444208d5aa6ba69ff148"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b3e3215d048e94f40f1c95802e45dcc37c5b05eb46280fc2ccc8cd351bff839"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba2b0bf01777c9b9bc94b53764d6684554ce98551fec496f71bc5be3a03e98b"}, - {file = "tokenizers-0.13.3-cp311-cp311-win32.whl", hash = "sha256:cc78d77f597d1c458bf0ea7c2a64b6aa06941c7a99cb135b5969b0278824d808"}, - {file = "tokenizers-0.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:ecf182bf59bd541a8876deccf0360f5ae60496fd50b58510048020751cf1724c"}, - {file = "tokenizers-0.13.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:0527dc5436a1f6bf2c0327da3145687d3bcfbeab91fed8458920093de3901b44"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cbb2c307627dc99b44b22ef05ff4473aa7c7cc1fec8f0a8b37d8a64b1a16d2"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4560dbdeaae5b7ee0d4e493027e3de6d53c991b5002d7ff95083c99e11dd5ac0"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64064bd0322405c9374305ab9b4c07152a1474370327499911937fd4a76d004b"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c6e2ab0f2e3d939ca66aa1d596602105fe33b505cd2854a4c1717f704c51de"}, - {file = "tokenizers-0.13.3-cp37-cp37m-win32.whl", hash = "sha256:6cc29d410768f960db8677221e497226e545eaaea01aa3613fa0fdf2cc96cff4"}, - {file = "tokenizers-0.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fc2a7fdf864554a0dacf09d32e17c0caa9afe72baf9dd7ddedc61973bae352d8"}, - {file = 
"tokenizers-0.13.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8791dedba834c1fc55e5f1521be325ea3dafb381964be20684b92fdac95d79b7"}, - {file = "tokenizers-0.13.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:d607a6a13718aeb20507bdf2b96162ead5145bbbfa26788d6b833f98b31b26e1"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3791338f809cd1bf8e4fee6b540b36822434d0c6c6bc47162448deee3f77d425"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2f35f30e39e6aab8716f07790f646bdc6e4a853816cc49a95ef2a9016bf9ce6"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310204dfed5aa797128b65d63538a9837cbdd15da2a29a77d67eefa489edda26"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0f9b92ea052305166559f38498b3b0cae159caea712646648aaa272f7160963"}, - {file = "tokenizers-0.13.3-cp38-cp38-win32.whl", hash = "sha256:9a3fa134896c3c1f0da6e762d15141fbff30d094067c8f1157b9fdca593b5806"}, - {file = "tokenizers-0.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:8e7b0cdeace87fa9e760e6a605e0ae8fc14b7d72e9fc19c578116f7287bb873d"}, - {file = "tokenizers-0.13.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:00cee1e0859d55507e693a48fa4aef07060c4bb6bd93d80120e18fea9371c66d"}, - {file = "tokenizers-0.13.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a23ff602d0797cea1d0506ce69b27523b07e70f6dda982ab8cf82402de839088"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ce07445050b537d2696022dafb115307abdffd2a5c106f029490f84501ef97"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:280ffe95f50eaaf655b3a1dc7ff1d9cf4777029dbbc3e63a74e65a056594abc3"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97acfcec592f7e9de8cadcdcda50a7134423ac8455c0166b28c9ff04d227b371"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7730c98a3010cd4f523465867ff95cd9d6430db46676ce79358f65ae39797b"}, - {file = "tokenizers-0.13.3-cp39-cp39-win32.whl", hash = "sha256:48625a108029cb1ddf42e17a81b5a3230ba6888a70c9dc14e81bc319e812652d"}, - {file = "tokenizers-0.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:bc0a6f1ba036e482db6453571c9e3e60ecd5489980ffd95d11dc9f960483d783"}, - {file = "tokenizers-0.13.3.tar.gz", hash = "sha256:2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e"}, -] - -[package.extras] -dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] - -[[package]] -name = "torch" -version = "2.3.1" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "torch-2.3.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:605a25b23944be5ab7c3467e843580e1d888b8066e5aaf17ff7bf9cc30001cc3"}, - {file = "torch-2.3.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f2357eb0965583a0954d6f9ad005bba0091f956aef879822274b1bcdb11bd308"}, - {file = "torch-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:32b05fe0d1ada7f69c9f86c14ff69b0ef1957a5a54199bacba63d22d8fab720b"}, - {file = "torch-2.3.1-cp310-none-macosx_11_0_arm64.whl", hash = 
"sha256:7c09a94362778428484bcf995f6004b04952106aee0ef45ff0b4bab484f5498d"}, - {file = "torch-2.3.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:b2ec81b61bb094ea4a9dee1cd3f7b76a44555375719ad29f05c0ca8ef596ad39"}, - {file = "torch-2.3.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:490cc3d917d1fe0bd027057dfe9941dc1d6d8e3cae76140f5dd9a7e5bc7130ab"}, - {file = "torch-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:5802530783bd465fe66c2df99123c9a54be06da118fbd785a25ab0a88123758a"}, - {file = "torch-2.3.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:a7dd4ed388ad1f3d502bf09453d5fe596c7b121de7e0cfaca1e2017782e9bbac"}, - {file = "torch-2.3.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:a486c0b1976a118805fc7c9641d02df7afbb0c21e6b555d3bb985c9f9601b61a"}, - {file = "torch-2.3.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:224259821fe3e4c6f7edf1528e4fe4ac779c77addaa74215eb0b63a5c474d66c"}, - {file = "torch-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:e5fdccbf6f1334b2203a61a0e03821d5845f1421defe311dabeae2fc8fbeac2d"}, - {file = "torch-2.3.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:3c333dc2ebc189561514eda06e81df22bf8fb64e2384746b2cb9f04f96d1d4c8"}, - {file = "torch-2.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:07e9ba746832b8d069cacb45f312cadd8ad02b81ea527ec9766c0e7404bb3feb"}, - {file = "torch-2.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:462d1c07dbf6bb5d9d2f3316fee73a24f3d12cd8dacf681ad46ef6418f7f6626"}, - {file = "torch-2.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff60bf7ce3de1d43ad3f6969983f321a31f0a45df3690921720bcad6a8596cc4"}, - {file = "torch-2.3.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:bee0bd33dc58aa8fc8a7527876e9b9a0e812ad08122054a5bff2ce5abf005b10"}, - {file = "torch-2.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:aaa872abde9a3d4f91580f6396d54888620f4a0b92e3976a6034759df4b961ad"}, - {file = "torch-2.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3d7a7f7ef21a7520510553dc3938b0c57c116a7daee20736a9e25cbc0e832bdc"}, - {file = "torch-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:4777f6cefa0c2b5fa87223c213e7b6f417cf254a45e5829be4ccd1b2a4ee1011"}, - {file = "torch-2.3.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:2bb5af780c55be68fe100feb0528d2edebace1d55cb2e351de735809ba7391eb"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -mkl = {version = ">=2021.1.1,<=2021.4.0", markers = "platform_system == \"Windows\""} -networkx = "*" -nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "8.9.2.26", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = 
"platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -sympy = "*" -triton = {version = "2.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""} -typing-extensions = ">=4.8.0" - -[package.extras] -opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.9.1)"] - -[[package]] -name = "torchvision" -version = "0.18.1" -description = "image and video datasets and models for torch deep learning" -optional = false -python-versions = ">=3.8" -files = [ - {file = "torchvision-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e694e54b0548dad99c12af6bf0c8e4f3350137d391dcd19af22a1c5f89322b3"}, - {file = "torchvision-0.18.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:0b3bda0aa5b416eeb547143b8eeaf17720bdba9cf516dc991aacb81811aa96a5"}, - {file = "torchvision-0.18.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:573ff523c739405edb085f65cb592f482d28a30e29b0be4c4ba08040b3ae785f"}, - {file = "torchvision-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:ef7bbbc60b38e831a75e547c66ca1784f2ac27100f9e4ddbe9614cef6cbcd942"}, - {file = "torchvision-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80b5d794dd0fdba787adc22f1a367a5ead452327686473cb260dd94364bc56a6"}, - {file = "torchvision-0.18.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:9077cf590cdb3a5e8fdf5cdb71797f8c67713f974cf0228ecb17fcd670ab42f9"}, - {file = "torchvision-0.18.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ceb993a882f1ae7ae373ed39c28d7e3e802205b0e59a7ed84ef4028f0bba8d7f"}, - {file = "torchvision-0.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:52f7436140045dc2239cdc502aa76b2bd8bd676d64244ff154d304aa69852046"}, - {file = "torchvision-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2be6f0bf7c455c89a51a1dbb6f668d36c6edc479f49ac912d745d10df5715657"}, - {file = "torchvision-0.18.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:f118d887bfde3a948a41d56587525401e5cac1b7db2eaca203324d6ed2b1caca"}, - {file = "torchvision-0.18.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:13d24d904f65e62d66a1e0c41faec630bc193867b8a4a01166769e8a8e8df8e9"}, - {file = "torchvision-0.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:ed6340b69a63a625e512a66127210d412551d9c5f2ad2978130c6a45bf56cd4a"}, - {file = "torchvision-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b1c3864fa9378c88bce8ad0ef3599f4f25397897ce612e1c245c74b97092f35e"}, - {file = "torchvision-0.18.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:02085a2ffc7461f5c0edb07d6f3455ee1806561f37736b903da820067eea58c7"}, - {file = "torchvision-0.18.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:9726c316a2501df8503e5a5dc46a631afd4c515a958972e5b7f7b9c87d2125c0"}, - {file = "torchvision-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:64a2662dbf30db9055d8b201d6e56f312a504e5ccd9d144c57c41622d3c524cb"}, - {file = "torchvision-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:975b8594c0f5288875408acbb74946eea786c5b008d129c0d045d0ead23742bc"}, - {file = "torchvision-0.18.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:da83c8bbd34d8bee48bfa1d1b40e0844bc3cba10ed825a5a8cbe3ce7b62264cd"}, - {file = "torchvision-0.18.1-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:54bfcd352abb396d5c9c237d200167c178bd136051b138e1e8ef46ce367c2773"}, - {file = "torchvision-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:5c8366a1aeee49e9ea9e64b30d199debdf06b1bd7610a76165eb5d7869c3bde5"}, -] - -[package.dependencies] -numpy = "*" -pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" -torch = "2.3.1" - -[package.extras] -scipy = ["scipy"] - -[[package]] -name = "tqdm" -version = "4.64.0" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -files = [ - {file = "tqdm-4.64.0-py2.py3-none-any.whl", hash = "sha256:74a2cdefe14d11442cedf3ba4e21a3b84ff9a2dbdc6cfae2c34addb2a14a5ea6"}, - {file = "tqdm-4.64.0.tar.gz", hash = "sha256:40be55d30e200777a307a7585aee69e4eabb46b4ec6a4b4a5f2d9f11e7d5408d"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "transformers" -version = "4.29.2" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "transformers-4.29.2-py3-none-any.whl", hash = "sha256:0ef158b99bad6f4e6652a0d8655fbbe58b4cb788ce7040f320b5d29c7c810a75"}, - {file = "transformers-4.29.2.tar.gz", hash = "sha256:ed9467661f459f1ce49461d83f18f3b36b6a37f306182dc2ba272935f3b93ebb"}, -] - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.14.1,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.19.0)"] -agents = ["Pillow", "accelerate (>=0.19.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"] -all = ["Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.19.0)", "deepspeed (>=0.8.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.19.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.8.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate 
(>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.6.9)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "numba (<0.57.0)", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.19.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "numba (<0.57.0)", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow", "accelerate (>=0.19.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "numba (<0.57.0)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", 
"tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] -docs-specific = ["hf-doc-builder"] -fairscale = ["fairscale (>0.3)"] -flax = ["flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8,<=0.1.4)"] -flax-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune]", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (>=0.0.241,<=0.0.259)", "urllib3 (<2.0.0)"] -ray = ["ray[tune]"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm"] -tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["accelerate (>=0.19.0)", "torch (>=1.9,!=1.12.0)"] -torch-speech = ["kenlm", "librosa", "numba (<0.57.0)", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.14.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow"] - -[[package]] -name = "triton" -version = "2.3.1" -description = "A language and compiler for custom Deep Learning operations" -optional = false -python-versions = "*" -files = [ - {file = "triton-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c84595cbe5e546b1b290d2a58b1494df5a2ef066dd890655e5b8a8a92205c33"}, - {file = "triton-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d64ae33bcb3a7a18081e3a746e8cf87ca8623ca13d2c362413ce7a486f893e"}, - {file = "triton-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eaf80e8761a9e3498aa92e7bf83a085b31959c61f5e8ac14eedd018df6fccd10"}, - {file = "triton-2.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b13bf35a2b659af7159bf78e92798dc62d877aa991de723937329e2d382f1991"}, - {file = "triton-2.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63381e35ded3304704ea867ffde3b7cfc42c16a55b3062d41e017ef510433d66"}, - {file = "triton-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d968264523c7a07911c8fb51b4e0d1b920204dae71491b1fe7b01b62a31e124"}, -] - -[package.dependencies] -filelock = "*" - -[package.extras] -build = ["cmake (>=3.20)", "lit"] -tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"] -tutorials = ["matplotlib", "pandas", "tabulate", "torch"] - -[[package]] -name = "tritonclient" -version = "2.41.0" -description = "Python client library and utilities for communicating with Triton Inference Server" -optional = false -python-versions = "*" -files = [ - {file = "tritonclient-2.41.0-py3-none-any.whl", hash = "sha256:ee543c3436f1a068f4f4a243a729e27006b07ea6477b38ecd8f62d167835e092"}, - {file = "tritonclient-2.41.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:cf4ed5df836fa99937678a9fd852f86ba05c7b83656dd0a540e20afdb9ab4629"}, - {file = "tritonclient-2.41.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:2314843f6a3c50a8e20166e56cc47af9ac41a3c100e919eddbbefa5ba09bd1b9"}, -] - -[package.dependencies] -aiohttp = {version = ">=3.8.1,<4.0.0", optional = true, markers = "extra == \"all\""} -cuda-python = {version = "*", optional = true, markers = "extra == \"all\""} -geventhttpclient = {version = ">=1.4.4,<=2.0.2", optional = true, markers = "extra == \"all\""} -grpcio = {version = ">=1.41.0", optional = true, markers = "extra == \"all\""} -numpy = ">=1.19.1" -packaging = {version = ">=14.1", optional = true, markers = "extra == \"all\""} -python-rapidjson = ">=0.9.1" - -[package.extras] -all = ["aiohttp (>=3.8.1,<4.0.0)", "cuda-python", "geventhttpclient (>=1.4.4,<=2.0.2)", "grpcio (>=1.41.0)", "numpy (>=1.19.1)", "packaging (>=14.1)", "python-rapidjson (>=0.9.1)"] -cuda = ["cuda-python"] -grpc = ["grpcio (>=1.41.0)", "numpy (>=1.19.1)", "packaging (>=14.1)", "python-rapidjson (>=0.9.1)"] -http = ["aiohttp (>=3.8.1,<4.0.0)", "geventhttpclient (>=1.4.4,<=2.0.2)", "numpy (>=1.19.1)", "python-rapidjson (>=0.9.1)"] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "typing-inspect" -version = "0.6.0" -description = "Runtime inspection utilities for typing module." 
-optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.6.0-py2-none-any.whl", hash = "sha256:de08f50a22955ddec353876df7b2545994d6df08a2f45d54ac8c05e530372ca0"}, - {file = "typing_inspect-0.6.0-py3-none-any.whl", hash = "sha256:3b98390df4d999a28cf5b35d8b333425af5da2ece8a4ea9e98f71e7591347b4f"}, - {file = "typing_inspect-0.6.0.tar.gz", hash = "sha256:8f1b1dd25908dbfd81d3bebc218011531e7ab614ba6e5bf7826d887c834afab7"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - 
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "zope-event" -version = "5.0" -description = "Very basic event publishing system" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, - {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, -] - -[package.dependencies] -setuptools = "*" - -[package.extras] -docs = ["Sphinx"] -test = ["zope.testrunner"] - -[[package]] -name = "zope-interface" -version = "6.4.post2" -description = "Interfaces for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zope.interface-6.4.post2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2eccd5bef45883802848f821d940367c1d0ad588de71e5cabe3813175444202c"}, - {file = "zope.interface-6.4.post2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:762e616199f6319bb98e7f4f27d254c84c5fb1c25c908c2a9d0f92b92fb27530"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef8356f16b1a83609f7a992a6e33d792bb5eff2370712c9eaae0d02e1924341"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e4fa5d34d7973e6b0efa46fe4405090f3b406f64b6290facbb19dcbf642ad6b"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d22fce0b0f5715cdac082e35a9e735a1752dc8585f005d045abb1a7c20e197f9"}, - {file = "zope.interface-6.4.post2-cp310-cp310-win_amd64.whl", hash = "sha256:97e615eab34bd8477c3f34197a17ce08c648d38467489359cb9eb7394f1083f7"}, - {file = "zope.interface-6.4.post2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:599f3b07bde2627e163ce484d5497a54a0a8437779362395c6b25e68c6590ede"}, - {file = "zope.interface-6.4.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:136cacdde1a2c5e5bc3d0b2a1beed733f97e2dad8c2ad3c2e17116f6590a3827"}, - {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47937cf2e7ed4e0e37f7851c76edeb8543ec9b0eae149b36ecd26176ff1ca874"}, - {file = 
"zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f0a6be264afb094975b5ef55c911379d6989caa87c4e558814ec4f5125cfa2e"}, - {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47654177e675bafdf4e4738ce58cdc5c6d6ee2157ac0a78a3fa460942b9d64a8"}, - {file = "zope.interface-6.4.post2-cp311-cp311-win_amd64.whl", hash = "sha256:e2fb8e8158306567a3a9a41670c1ff99d0567d7fc96fa93b7abf8b519a46b250"}, - {file = "zope.interface-6.4.post2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b912750b13d76af8aac45ddf4679535def304b2a48a07989ec736508d0bbfbde"}, - {file = "zope.interface-6.4.post2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ac46298e0143d91e4644a27a769d1388d5d89e82ee0cf37bf2b0b001b9712a4"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86a94af4a88110ed4bb8961f5ac72edf782958e665d5bfceaab6bf388420a78b"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73f9752cf3596771c7726f7eea5b9e634ad47c6d863043589a1c3bb31325c7eb"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b5c3e9744dcdc9e84c24ed6646d5cf0cf66551347b310b3ffd70f056535854"}, - {file = "zope.interface-6.4.post2-cp312-cp312-win_amd64.whl", hash = "sha256:551db2fe892fcbefb38f6f81ffa62de11090c8119fd4e66a60f3adff70751ec7"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96ac6b3169940a8cd57b4f2b8edcad8f5213b60efcd197d59fbe52f0accd66e"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cebff2fe5dc82cb22122e4e1225e00a4a506b1a16fafa911142ee124febf2c9e"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ee982237cffaf946db365c3a6ebaa37855d8e3ca5800f6f48890209c1cfefc"}, - {file = "zope.interface-6.4.post2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:fbf649bc77510ef2521cf797700b96167bb77838c40780da7ea3edd8b78044d1"}, - {file = "zope.interface-6.4.post2-cp37-cp37m-win_amd64.whl", hash = "sha256:4c0b208a5d6c81434bdfa0f06d9b667e5de15af84d8cae5723c3a33ba6611b82"}, - {file = "zope.interface-6.4.post2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3fe667935e9562407c2511570dca14604a654988a13d8725667e95161d92e9b"}, - {file = "zope.interface-6.4.post2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a96e6d4074db29b152222c34d7eec2e2db2f92638d2b2b2c704f9e8db3ae0edc"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:866a0f583be79f0def667a5d2c60b7b4cc68f0c0a470f227e1122691b443c934"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe919027f29b12f7a2562ba0daf3e045cb388f844e022552a5674fcdf5d21f1"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e0343a6e06d94f6b6ac52fbc75269b41dd3c57066541a6c76517f69fe67cb43"}, - {file = "zope.interface-6.4.post2-cp38-cp38-win_amd64.whl", hash = 
"sha256:dabb70a6e3d9c22df50e08dc55b14ca2a99da95a2d941954255ac76fd6982bc5"}, - {file = "zope.interface-6.4.post2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:706efc19f9679a1b425d6fa2b4bc770d976d0984335eaea0869bd32f627591d2"}, - {file = "zope.interface-6.4.post2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d136e5b8821073e1a09dde3eb076ea9988e7010c54ffe4d39701adf0c303438"}, - {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1730c93a38b5a18d24549bc81613223962a19d457cfda9bdc66e542f475a36f4"}, - {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc2676312cc3468a25aac001ec727168994ea3b69b48914944a44c6a0b251e79"}, - {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a62fd6cd518693568e23e02f41816adedfca637f26716837681c90b36af3671"}, - {file = "zope.interface-6.4.post2-cp39-cp39-win_amd64.whl", hash = "sha256:d3f7e001328bd6466b3414215f66dde3c7c13d8025a9c160a75d7b2687090d15"}, - {file = "zope.interface-6.4.post2.tar.gz", hash = "sha256:1c207e6f6dfd5749a26f5a5fd966602d6b824ec00d2df84a7e9a924e8933654e"}, -] - -[package.dependencies] -setuptools = "*" - -[package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] -test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] -testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.10" -content-hash = "36b0d8bae6f499957d37c37266649a6810ddb42ba225349cdc25a839317d1e9d" diff --git a/stf/stf-api-alternative/pyproject.toml b/stf/stf-api-alternative/pyproject.toml deleted file mode 100644 index 5874e6e7588b91d6089df5cab5a91b2314abf32c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pyproject.toml +++ /dev/null @@ -1,35 +0,0 @@ -[tool.poetry] -name = "stf-alternative" -version = "0.1.0" -description = "alternative version of stf-api" -authors = ["Kim Minjong "] -readme = "README.md" -packages = [ - {include = "stf_alternative", from="src"} -] - -[tool.poetry.dependencies] -python = "^3.10" -librosa = "0.8.1" -imageio = "2.13.5" -imageio-ffmpeg = "0.4.5" -Pillow = "9.1.0" -tqdm = "4.64.0" -numpy = "1.24.4" -addict = "2.4.0" -scipy = "1.12.0" -pandas = "1.3.5" -face_alignment = "1.3.5" -moviepy = "1.0.3" -transformers = "4.29.2" -facenet_pytorch = "2.5.2" -ffmpeg-python = "^0.2" -pydub = "^0.25" -av = "^11.0.0" -nvidia-pytriton = {extras = ["client"], version = "^0.4.2"} -asyncstdlib = "^3.10.9" - - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/stf/stf-api-alternative/pytriton/.flake8 b/stf/stf-api-alternative/pytriton/.flake8 deleted file mode 100644 index c2fa9d76e7e191b97456af0bce5a7079fca24c1b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/.flake8 +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -[flake8] -exclude = docs,experiments,blueprints,pytriton/tritonserver,sandbox -ignore = E203, E266, E501, W503 -max-line-length = 120 -max-complexity = 18 -select = B,C,D,E,F,W,T,N diff --git a/stf/stf-api-alternative/pytriton/.github/ISSUE_TEMPLATE/bug_report.md b/stf/stf-api-alternative/pytriton/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 4141877af12d4a6b460f6fcf724e0dc2e96615e1..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: '' -assignees: '' - ---- - -**Description** - -A clear and concise description of the bug. - -**To reproduce** - -If relevant, add a minimal example so that we can reproduce the error, if necessary, by running the code. For example: - -```python -# server -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -@batch -def _infer_fn(**inputs): - ... - results_dict = model(**inputs) # ex note: the bug is here, we expect to receive ... - ... - # note: observing results_dict as dictionary of numpy arrays - return results_dict - - -with Triton() as triton: - triton.bind( - model_name="MyModel", - infer_func=_infer_fn, - inputs=[ - Tensor(name="in1", dtype=np.float32, shape=(-1,)), - Tensor(name="in2", dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="out1", dtype=np.float32, shape=(-1,)), - Tensor(name="out2", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - ) - triton.serve() -``` - -```python -# client -import numpy as np -from pytriton.client import ModelClient - -batch_size = 2 -in1_batch = np.ones((batch_size, 1), dtype=np.float32) -in2_batch = np.ones((batch_size, 1), dtype=np.float32) - -with ModelClient("localhost", "MyModel") as client: - result_batch = client.infer_batch(in1_batch, in2_batch) -``` - -**Observed results and expected behavior** - -Please describe the observed results as well as the expected results. -If possible, attach relevant log output to help analyze your problem. -If an error is raised, please paste the full traceback of the exception. - -``` - -``` - -**Environment** - -- OS/container version: [e.g., container nvcr.io/nvidia/pytorch:23.02-py3 / virtual machine with Ubuntu 22.04] - - glibc version: [e.g., 2.31; can be checked with `ldd --version`] -- Python interpreter distribution and version: [e.g., CPython 3.8 / conda 4.7.12 with Python 3.8 environment] -- pip version: [e.g., 23.1.2] -- PyTriton version: [e.g., 0.1.4 / custom build from source at commit ______] -- Deployment details: [e.g., multi-node multi-GPU setup on GKE / multi-GPU single-node setup in Jupyter Notebook] - -**Additional context** -Add any other context about the problem here. diff --git a/stf/stf-api-alternative/pytriton/.github/ISSUE_TEMPLATE/feature_request.md b/stf/stf-api-alternative/pytriton/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index bbcbbe7d61558adde3cbfd0c7a63a67c27ed6d30..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: '' -assignees: '' - ---- - -**Is your feature request related to a problem? 
Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. diff --git a/stf/stf-api-alternative/pytriton/.github/workflows/stale.yaml b/stf/stf-api-alternative/pytriton/.github/workflows/stale.yaml deleted file mode 100644 index 8d20d00f6c6a88eeb9d41b1b1fec0a03d915b738..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/.github/workflows/stale.yaml +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -name: 'Close stale issues and PRs' -on: - schedule: - - cron: "30 1 * * *" -jobs: - stale: - if: github.repository_owner == 'triton-inference-server' - runs-on: ubuntu-latest - permissions: - issues: write - pull-requests: write - steps: - - uses: actions/stale@v8 - with: - days-before-stale: 21 - days-before-close: 7 - stale-issue-message: 'This issue is stale because it has been open 21 days with no activity. Remove stale label or comment or this will be closed in 7 days.' - stale-pr-message: 'This PR is stale because it has been open 21 days with no activity. Remove stale label or comment or this will be closed in 7 days.' - close-issue-message: 'This issue was closed because it has been stalled for 7 days with no activity.' - close-pr-message: 'This PR was closed because it has been stalled for 7 days with no activity.' - exempt-issue-labels: 'non-stale' - exempt-pr-labels: 'non-stale' diff --git a/stf/stf-api-alternative/pytriton/.gitignore b/stf/stf-api-alternative/pytriton/.gitignore deleted file mode 100644 index fcba59994f3859f6b6c44003c1af7107b95232ea..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/.gitignore +++ /dev/null @@ -1,330 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# Created by https://www.toptal.com/developers/gitignore/api/pycharm+all,visualstudiocode,python,direnv,vim -# Edit at https://www.toptal.com/developers/gitignore?templates=pycharm+all,visualstudiocode,python,direnv,vim - -### direnv ### -.direnv -.envrc - -### PyCharm+all ### -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider -# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 - -# User-specific stuff -.idea/**/workspace.xml -.idea/**/tasks.xml -.idea/**/usage.statistics.xml -.idea/**/dictionaries -.idea/**/shelf - -# AWS User-specific -.idea/**/aws.xml - -# Generated files -.idea/**/contentModel.xml - -# Sensitive or high-churn files -.idea/**/dataSources/ -.idea/**/dataSources.ids -.idea/**/dataSources.local.xml -.idea/**/sqlDataSources.xml -.idea/**/dynamic.xml -.idea/**/uiDesigner.xml -.idea/**/dbnavigator.xml - -# Gradle -.idea/**/gradle.xml -.idea/**/libraries - -# Gradle and Maven with auto-import -# When using Gradle or Maven with auto-import, you should exclude module files, -# since they will be recreated, and may cause churn. Uncomment if using -# auto-import. -# .idea/artifacts -# .idea/compiler.xml -# .idea/jarRepositories.xml -# .idea/modules.xml -# .idea/*.iml -# .idea/modules -# *.iml -# *.ipr - -# CMake -cmake-build-*/ - -# Mongo Explorer plugin -.idea/**/mongoSettings.xml - -# File-based project format -*.iws - -# IntelliJ -out/ - -# mpeltonen/sbt-idea plugin -.idea_modules/ - -# JIRA plugin -atlassian-ide-plugin.xml - -# Cursive Clojure plugin -.idea/replstate.xml - -# SonarLint plugin -.idea/sonarlint/ - -# Crashlytics plugin (for Android Studio and IntelliJ) -com_crashlytics_export_strings.xml -crashlytics.properties -crashlytics-build.properties -fabric.properties - -# Editor-based Rest Client -.idea/httpRequests - -# Android studio 3.1+ serialized cache file -.idea/caches/build_file_checksums.ser - -### PyCharm+all Patch ### -# Ignore everything but code style settings and run configurations -# that are supposed to be shared within teams. - -.idea/* - -!.idea/codeStyles -!.idea/runConfigurations - -### Python ### -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
-# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/#use-with-ide -.pdm.toml - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ - -### Python Patch ### -# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration -poetry.toml - -# ruff -.ruff_cache/ - -# LSP config files -pyrightconfig.json - -### Vim ### -# Swap -[._]*.s[a-v][a-z] -!*.svg # comment out if you don't need vector files -[._]*.sw[a-p] -[._]s[a-rt-v][a-z] -[._]ss[a-gi-z] -[._]sw[a-p] - -# Session -Session.vim -Sessionx.vim - -# Temporary -.netrwhist -*~ -# Auto-generated tag files -tags -# Persistent undo -[._]*.un~ - -### VisualStudioCode ### -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json -!.vscode/*.code-snippets - -# Local History for Visual Studio Code -.history/ - -# Built Visual Studio Code Extensions -*.vsix - -### VisualStudioCode Patch ### -# Ignore all local history of files -.history -.ionide - -# End of https://www.toptal.com/developers/gitignore/api/pycharm+all,visualstudiocode,python,direnv,vim -pytriton/tritonserver -docs/CHANGELOG.md -docs/CONTRIBUTING.md -docs/LICENSE.md -docs/examples.md - -### VisualStudioCode+all ## -.vscode -.devcontainer diff --git a/stf/stf-api-alternative/pytriton/.pre-commit-config.yaml b/stf/stf-api-alternative/pytriton/.pre-commit-config.yaml deleted file mode 100644 index cad46a35981de7d448af7fe4a0cdf9fd25fda5ff..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/.pre-commit-config.yaml +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -exclude: kubernetes -repos: - - repo: https://github.com/ambv/black - rev: 23.11.0 - hooks: - - id: black - - repo: https://github.com/pycqa/isort - rev: 5.12.0 - hooks: - - id: isort - name: isort (python) - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 - hooks: - - id: check-docstring-first - - id: check-executables-have-shebangs - - id: check-json - - id: check-merge-conflict - - id: detect-private-key - - id: check-shebang-scripts-are-executable - - id: check-toml - - id: check-yaml - - id: debug-statements - - id: end-of-file-fixer - types: [python] - - id: fix-byte-order-marker - - id: no-commit-to-branch - - id: requirements-txt-fixer - - id: trailing-whitespace - exclude: setup.cfg - - id: mixed-line-ending - args: [--fix=lf] - - repo: https://github.com/asottile/pyupgrade - rev: v3.15.0 - hooks: - - id: pyupgrade - args: [--py36-plus] - - repo: https://github.com/pycqa/flake8 - rev: 6.1.0 - hooks: - - id: flake8 - additional_dependencies: - - flake8-bugbear - - flake8-comprehensions - - flake8-print - - mccabe - - pep8-naming - - pycodestyle - - pyflakes - - repo: https://github.com/pycqa/pydocstyle - rev: 6.3.0 - hooks: - - id: pydocstyle - name: Run pydocstyle - args: - - --convention=google - exclude: '(?:tests|examples)\/.*' - additional_dependencies: ['toml'] - - repo: https://github.com/thlorenz/doctoc - rev: v2.2.0 - hooks: - - id: doctoc - args: [ --github, --update-only ] diff --git a/stf/stf-api-alternative/pytriton/CHANGELOG.md b/stf/stf-api-alternative/pytriton/CHANGELOG.md deleted file mode 100644 index 67ff4fc2701fd2e2e96a1f86a39db4f0f3d82153..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/CHANGELOG.md +++ /dev/null @@ -1,239 +0,0 @@ - - -# Changelog - -## 0.4.2 (2023-12-05) - -- New: You can create client from existing client instance or model configuration to avoid loading model configuration from server. -- New: Introduced warning system using the `warnings` module. -- Fix: Experimental client for decoupled models prevents sending another request, when responses from previous request are not consumed, blocks close until stream is stopped. -- Fix: Leak of ModelClient during Triton creation -- Fix: Fixed non-declared project dependencies (removed from use in code or added to package dependencies) -- Fix: Remote model is being unloaded from Triton when RemoteTriton is closed. 
- -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.39.0](https://github.com/triton-inference-server/server/releases/tag/v2.39.0) - -## 0.4.1 (2023-11-09) - -- New: Place where workspaces with temporary Triton model repositories and communication file sockets can be configured by `$PYTRITON_HOME` environment variable -- Fix: Recover handling `KeyboardInterrupt` in `triton.serve()` -- Fix: Remove limit for handling bytes dtype tensors -- Build scripts update - - Added support for arm64 platform builds - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.39.0](https://github.com/triton-inference-server/server/releases/tag/v2.39.0) - -## 0.4.0 (2023-10-20) - -- New: Remote Mode - PyTriton can be used to connect to a remote Triton Inference Server - - Introduced RemoteTriton class which can be used to connect to a remote Triton Inference Server - running on the same machine, by passing triton url. - - Changed Triton lifecycle - now the Triton Inference Server is started while entering the context. - This allows to load models dynamically to the running server while calling the bind method. - It is still allowed to create Triton instance without entering the context and bind models before starting - the server (in this case the models are lazy loaded when calling run or serve method like it worked before). - - In RemoteTriton class, calling __enter__ or connect method connects to triton server, so we can safely load models - while binding inference functions (if RemoteTriton is used without context manager, models are lazy loaded - when calling connect or serve method). -- Change: `@batch` decorator raises a `ValueError` if any of the outputs have a different batch size than expected. -- fix: gevent resources leak in ``FuturesModelClient`` - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.36.0](https://github.com/triton-inference-server/server/releases/tag/v2.36.0) - -## 0.3.1 (2023-09-26) - -- Change: `KeyboardInterrupt` is now handled in `triton.serve()`. PyTriton hosting scripts return an exit code of 0 instead of 130 when they receive a SIGINT signal. -- Fix: Addressed potential instability in shared memory management. - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.36.0](https://github.com/triton-inference-server/server/releases/tag/v2.36.0) - -## 0.3.0 (2023-09-05) - -- new: Support for multiple Python versions starting from 3.8+ -- new: Added support for [decoupled models](https://github.com/triton-inference-server/server/blob/main/docs/user_guide/decoupled_models.md) enabling to support streaming models (alpha state) -- change: Upgraded Triton Inference Server binaries to version 2.36.0. Note that this Triton Inference Server requires glibc 2.35+ or a more recent version. 
- -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.36.0](https://github.com/triton-inference-server/server/releases/tag/v2.36.0) - - -## 0.2.5 (2023-08-24) - -- new: Allow to execute multiple PyTriton instances in the same process and/or host -- fix: Invalid flags for Proxy Backend configuration passed to Triton - - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.33.0](https://github.com/triton-inference-server/server/releases/tag/v2.33.0) - -## 0.2.4 (2023-08-10) - -- new: Introduced `strict` flag in `Triton.bind` which enables data types and shapes validation of inference callable outputs - against model config -- new: `AsyncioModelClient` which works in FastAPI and other async frameworks -- fix: `FuturesModelClient` do not raise `gevent.exceptions.InvalidThreadUseError` -- fix: Do not throw TimeoutError if could not connect to server during model verification - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.33.0](https://github.com/triton-inference-server/server/releases/tag/v2.33.0) - -## 0.2.3 (2023-07-21) - -- Improved verification of Proxy Backend environment when running under same Python interpreter -- Fixed pytriton.__version__ to represent currently installed version - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.33.0](https://github.com/triton-inference-server/server/releases/tag/v2.33.0) - -## 0.2.2 (2023-07-19) - -- Added `inference_timeout_s` parameters to client classes -- Renamed `PyTritonClientUrlParseError` to `PyTritonClientInvalidUrlError` -- `ModelClient` and `FuturesModelClient` methods raise `PyTritonClientClosedError` when used after client is closed -- Pinned tritonclient dependency due to issues with tritonclient >= 2.34 on systems with glibc version lower than 2.34 -- Added warning after Triton Server setup and teardown while using too verbose logging level as it may cause a significant performance drop in model inference - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.33.0](https://github.com/triton-inference-server/server/releases/tag/v2.33.0) - -## 0.2.1 (2023-06-28) - -- Fixed handling `TritonConfig.cache_directory` option - the directory was always overwritten with the default value. -- Fixed tritonclient dependency - PyTriton need tritonclient supporting http headers and parameters -- Improved shared memory usage to match 64MB limit (default value for Docker, Kubernetes) reducing the initial size for PyTriton Proxy Backend. 
- -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.33.0](https://github.com/triton-inference-server/server/releases/tag/v2.33.0) - -## 0.2.0 (2023-05-30) - -- Added support for using custom HTTP/gRPC request headers and parameters. - - This change breaks backward compatibility of the inference function signature. - The undecorated inference function now accepts a list of `Request` instances instead - of a list of dictionaries. The `Request` class contains data for inputs and parameters - for combined parameters and headers. - - See [docs/custom_params.md](docs/custom_params.md) for further information - -- Added `FuturesModelClient` which enables sending inference requests in a parallel manner. -- Added displaying documentation link after models are loaded. - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of [Triton Inference Server](https://github.com/triton-inference-server/) embedded in wheel: [2.33.0](https://github.com/triton-inference-server/server/releases/tag/v2.33.0) - -## 0.1.5 (2023-05-12) - -- Improved `pytriton.decorators.group_by_values` function - - Modified the function to avoid calling the inference callable on each individual sample when grouping by string/bytes input - - Added `pad_fn` argument for easy padding and combining of the inference results -- Fixed Triton binaries search -- Improved Workspace management (remove workspace on shutdown) - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of external components used during testing: - - [Triton Inference Server](https://github.com/triton-inference-server/): 2.29.0 - - Other component versions depend on the used framework and Triton Inference Server containers versions. - Refer to its [support matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html) - for a detailed summary. - -## 0.1.4 (2023-03-16) - -- Add validation of the model name passed to Triton bind method. -- Add monkey patching of `InferenceServerClient.__del__` method to prevent unhandled exceptions. - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of external components used during testing: - - [Triton Inference Server](https://github.com/triton-inference-server/): 2.29.0 - - Other component versions depend on the used framework and Triton Inference Server containers versions. - Refer to its [support matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html) - for a detailed summary. - -## 0.1.3 (2023-02-20) - -- Fixed getting model config in `fill_optionals` decorator. - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of external components used during testing: - - [Triton Inference Server](https://github.com/triton-inference-server/): 2.29.0 - - Other component versions depend on the used framework and Triton Inference Server containers versions. - Refer to its [support matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html) - for a detailed summary. - -## 0.1.2 (2023-02-14) - -- Fixed wheel build to support installations on operating systems with glibc version 2.31 or higher. -- Updated the documentation on custom builds of the package. 
-- Change: TritonContext instance is shared across bound models and contains model_configs dictionary. -- Fixed support of binding multiple models that uses methods of the same class. - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of external components used during testing: - - [Triton Inference Server](https://github.com/triton-inference-server/): 2.29.0 - - Other component versions depend on the used framework and Triton Inference Server containers versions. - Refer to its [support matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html) - for a detailed summary. - -## 0.1.1 (2023-01-31) - -- Change: The `@first_value` decorator has been updated with new features: - - Renamed from `@first_values` to `@first_value` - - Added a `strict` flag to toggle the checking of equality of values on a single selected input of the request. Default is True - - Added a `squeeze_single_values` flag to toggle the squeezing of single value ND arrays to scalars. Default is True -- Fix: `@fill_optionals` now supports non-batching models -- Fix: `@first_value` fixed to work with optional inputs -- Fix: `@group_by_values` fixed to work with string inputs -- Fix: `@group_by_values` fixed to work per sample-wise - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of external components used during testing: - - [Triton Inference Server](https://github.com/triton-inference-server/): 2.29.0 - - Other component versions depend on the used framework and Triton Inference Server containers versions. - Refer to its [support matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html) - for a detailed summary. - -## 0.1.0 (2023-01-12) - -- Initial release of PyTriton - -[//]: <> (put here on external component update with short summary what change or link to changelog) - -- Version of external components used during testing: - - [Triton Inference Server](https://github.com/triton-inference-server/): 2.29.0 - - Other component versions depend on the used framework and Triton Inference Server containers versions. - Refer to its [support matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html) - for a detailed summary. diff --git a/stf/stf-api-alternative/pytriton/CONTRIBUTING.md b/stf/stf-api-alternative/pytriton/CONTRIBUTING.md deleted file mode 100644 index b51e2abb815e1c51e55ecff20054c24ae723fceb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/CONTRIBUTING.md +++ /dev/null @@ -1,203 +0,0 @@ - - -# Contributing - -Contributions are welcome, and they are much appreciated! Every little -helps, and we will always give credit. - -## Types of Contributions - -### Report Bugs - -Report bugs at [https://github.com/triton-inference-server/pytriton/issues](https://github.com/triton-inference-server/pytriton/issues). - -When reporting a bug, please include the following information: - -* Your operating system name and version. -* Any details about your local setup that might be helpful in troubleshooting. -* Detailed steps to reproduce the bug. - -### Fix Bugs - -Look through the GitHub issues for bugs. Anything tagged with "bug" and "help -wanted" is open to whoever wants to implement it. - -### Implement Features - -Browse through the GitHub issues for features. Anything tagged with "enhancement" and "help wanted" is open to whoever wants to implement it. 
- -### Write Documentation - -The PyTriton could always use more documentation, whether as part of -the official PyTriton docs, in docstrings, or even on the web in blog posts, -articles, and such. - -### Submit Feedback - -The best way to send feedback is to file an issue at [https://github.com/triton-inference-server/pytriton/issues](https://github.com/triton-inference-server/pytriton/issues). - -If you are proposing a feature: - -* Explain in detail how it would work. -* Keep the scope as narrow as possible to make it easier to implement. - -## Sign your Work - -We require that all contributors "sign-off" on their commits. This certifies that -the contribution is your original work, or you have the rights to submit it under -the same license or a compatible license. - -Any contribution which contains commits that are not Signed-Off will not be accepted. - -To sign off on a commit, simply use the `--signoff` (or `-s`) option when committing your changes: - -```shell -$ git commit -s -m "Add a cool feature." -``` - -This will append the following to your commit message: - -``` -Signed-off-by: Your Name -``` - -By doing this, you certify the following: - -``` -Developer Certificate of Origin -Version 1.1 - -Copyright (C) 2004, 2006 The Linux Foundation and its contributors. -1 Letterman Drive -Suite D4700 -San Francisco, CA, 94129 - -Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. - - -Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -(a) The contribution was created in whole or in part by me and I have the right to submit it under the open source license indicated in the file; or - -(b) The contribution is based upon previous work that, to the best of my knowledge, is covered under an appropriate open source license and I have the right under that license to submit that work with modifications, whether created in whole or in part by me, under the same open source license (unless I am permitted to submit under a different license), as indicated in the file; or - -(c) The contribution was provided directly to me by some other person who certified (a), (b) or (c) and I have not modified it. - -(d) I understand and agree that this project and the contribution are public and that a record of the contribution (including all personal information I submit with it, including my sign-off) is maintained indefinitely and may be redistributed consistent with this project or the open source license(s) involved. -``` - -## Get Started! - -### Local Development - -Ready to contribute? Here's how to set up the `PyTriton` for local development. - -1. Fork the `PyTriton` repo on GitHub. -2. Clone your fork locally: - -```shell -$ git clone git@github.com:your_name_here/pytriton.git -``` - -3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, here's how you set up your fork for local development: - -```shell -$ mkvirtualenv pytriton -$ cd pytriton/ -``` - -If you do not use the virtualenvwrapper package, you can initialize a virtual environment using the pure Python command: - -```shell -$ python -m venv pytriton -$ cd pytriton/ -$ source bin/activate -``` - -Once the virtualenv is activated, install the development dependencies: - -```shell -$ make install-dev -``` - -4. Extract Triton Server to your environment so you can debug PyTriton while serving some models on Triton: - -```shell -$ make extract-triton -``` - -5. 
Install pre-commit hooks: - -```shell -$ pre-commit install -``` - -6. Create a branch for local development: - -```shell -$ git checkout -b name-of-your-bugfix-or-feature -``` - -Now you can make your changes locally. - -7. When you're done making changes, check that your changes pass linters and the - tests, including testing other Python versions with tox: - -```shell -$ make lint # will run, among others, flake8 and pytype linters -$ make test # will run a test on your current virtualenv -``` - - To run a subset of tests: - -```shell -$ pytest tests.test_subset -``` - -8. Commit your changes and push your branch to GitHub: - -```shell -$ git add . -$ git commit -s -m "Your detailed description of your changes." -$ git push origin name-of-your-bugfix-or-feature -``` - -9. Submit a pull request through the GitHub website. - -### Pull Request Guidelines - -Before you submit a pull request, check that it meets these guidelines: - -1. The pull request should include tests. -2. If the pull request adds functionality, you should update the docs. Put your new functionality into a function with a docstring and add the feature to the list in README.md. - - -## Documentation - -Add/update docstrings as defined in [Google Style Guide](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings). - -## Contributor License Agreement (CLA) - -PyTriton requires that all contributors (or their corporate entity) send -a signed copy of the [Contributor License -Agreement](https://github.com/NVIDIA/triton-inference-server/blob/master/Triton-CCLA-v1.pdf) -to triton-cla@nvidia.com. - -*NOTE*: Contributors with no company affiliation can fill `N/A` in the -`Corporation Name` and `Corporation Address` fields. diff --git a/stf/stf-api-alternative/pytriton/COPYRIGHT b/stf/stf-api-alternative/pytriton/COPYRIGHT deleted file mode 100644 index 80c84c7dd322f593e15b1aacc60d2bb37a82abc5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/COPYRIGHT +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2020-2022, NVIDIA CORPORATION. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/LICENSE b/stf/stf-api-alternative/pytriton/LICENSE deleted file mode 100644 index 895657b9a966424100865826cff85a6a1b4c8bb9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/LICENSE +++ /dev/null @@ -1,174 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/Makefile b/stf/stf-api-alternative/pytriton/Makefile deleted file mode 100644 index c3fbb9d49f4a0605b97ace2a52429545caef021e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/Makefile +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-.PHONY: clean clean-build clean-tritonserver clean-pyc clean-docs clean-test docs lint test coverage release dist build-triton extract-triton install install-dev help -.DEFAULT_GOAL := help - -define BROWSER_PYSCRIPT -import os, webbrowser, sys - -from urllib.request import pathname2url - -webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) -endef -export BROWSER_PYSCRIPT - -define PRINT_HELP_PYSCRIPT -import re, sys - -for line in sys.stdin: - match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) - if match: - target, help = match.groups() - print("%-20s %s" % (target, help)) -endef -export PRINT_HELP_PYSCRIPT - -BROWSER := python -c "$$BROWSER_PYSCRIPT" -PIP_INSTALL := pip install --extra-index-url https://pypi.ngc.nvidia.com -TRITONSERVER_IMAGE_VERSION = 23.10 -TRITONSERVER_IMAGE_NAME = nvcr.io/nvidia/tritonserver:$(TRITONSERVER_IMAGE_VERSION)-pyt-python-py3 -TRITONSERVER_OUTPUT_DIR = ${PWD}/pytriton/tritonserver -TRITONSERVER_BASENAME = pytriton -PYTRITON_IMAGE_NAME = $(TRITONSERVER_BASENAME):$(TRITONSERVER_IMAGE_VERSION) -# to set PLATFORM from outside, use: make PLATFORM=linux/arm64; -# correct values are: linux/amd64 (default), linux/arm64 -PLATFORM=linux/amd64 - -help: - @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) - -clean: clean-build clean-pyc clean-test clean-tritonserver clean-docs ## remove all build, tritonserver, test, docs, coverage and Python artifacts - -clean-build: ## remove build artifacts - rm -fr build/ - rm -fr dist/ - rm -fr .eggs/ - find . -name '*.egg-info' -exec rm -fr {} + - find . -name '*.egg' -exec rm -f {} + - -clean-tritonserver: - rm -fr pytriton/tritonserver - -clean-pyc: ## remove Python file artifacts - find . -name '*.pyc' -exec rm -f {} + - find . -name '*.pyo' -exec rm -f {} + - find . -name '*~' -exec rm -f {} + - find . 
-name '__pycache__' -exec rm -fr {} + - -clean-docs: ## remove test and coverage artifacts - rm -rf site - -clean-test: ## remove test and coverage artifacts - rm -fr .tox/ - rm -f .coverage - rm -fr htmlcov/ - rm -fr .pytest_cache - rm -fr .pytype/ - -docs: clean-docs ## generate site - cp CHANGELOG.md docs - cp CONTRIBUTING.md docs - cp LICENSE docs/LICENSE.md - cp examples/README.md docs/examples.md - mkdocs build --clean - -docs-serve: docs - mkdocs serve - -lint: ## check style with pre-commit and pytype - tox -e pytype,pre-commit --develop - -test: ## run tests on every Python version with tox - tox --develop --skip-missing-interpreters - -coverage: ## check code coverage quickly with the default Python - coverage run --source pytriton -m pytest - coverage report -m - coverage html - $(BROWSER) htmlcov/index.html - -dist: clean build-triton extract-triton ## builds source and wheel package - bash ./scripts/build_wheel.sh $(PLATFORM) - ls -lh dist - find ./dist -iname *-linux*.whl -type f -exec bash ./scripts/add_libs_to_wheel.sh $(PYTRITON_IMAGE_NAME) $(TRITONSERVER_OUTPUT_DIR) {} $(PLATFORM) \; - find ./dist -iname *-linux*.whl -type f -delete - ls -lh dist - twine check dist/* - -build-triton: ## build Triton with Python Stubs - bash ./scripts/build_triton.sh $(TRITONSERVER_IMAGE_NAME) $(PYTRITON_IMAGE_NAME) $(PLATFORM) - echo "export PYTRITON_IMAGE_NAME=$(PYTRITON_IMAGE_NAME)" > .env - -extract-triton: build-triton ## extract Triton binaries and libraries - # changing dst path, change also in clean-build and pyproject.toml - bash ./scripts/extract_triton.sh $(PYTRITON_IMAGE_NAME) $(TRITONSERVER_OUTPUT_DIR) $(PLATFORM) - - -install: clean extract-triton ## install the package to the active Python's site-packages - $(PIP_INSTALL) --upgrade pip - $(PIP_INSTALL) . - -install-dev: clean-build clean-pyc - $(PIP_INSTALL) --upgrade pip - $(PIP_INSTALL) -e .[dev] diff --git a/stf/stf-api-alternative/pytriton/README.md b/stf/stf-api-alternative/pytriton/README.md deleted file mode 100644 index 0b929e805ed802783c15b2b0acc6cb06973f42ec..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/README.md +++ /dev/null @@ -1,343 +0,0 @@ - - -# PyTriton - -PyTriton is a Flask/FastAPI-like interface that simplifies Triton's deployment in Python environments. -The library allows serving Machine Learning models directly from Python through -NVIDIA's [Triton Inference Server](https://github.com/triton-inference-server). - - - - -- [Documentation](#documentation) -- [Feature matrix](#feature-matrix) -- [How it works?](#how-it-works) -- [Installation](#installation) - - [Prerequisites](#prerequisites) - - [Install from `pypi`](#install-from-pypi) - - [Setting Up Python Environment](#setting-up-python-environment) - - [Building binaries from source](#building-binaries-from-source) -- [Quick Start](#quick-start) -- [Architecture](#architecture) -- [Examples](#examples) - - [Streaming (alpha)](#streaming-alpha) - - [Profiling model](#profiling-model) -- [Version management](#version-management) -- [Useful Links](#useful-links) - - - -## Documentation - -Read how to customize the Triton Inference Server, load models, deploy on clusters, and the API reference -can be found in the [documentation](https://triton-inference-server.github.io/pytriton). The below sections provide -brief information about the product and quick start guide. 
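For contributors working from a source checkout, the Makefile shown earlier in this diff defines the usual local development targets. A minimal sketch of that flow (target names as defined in that Makefile; building the Triton image assumes Docker is available):

```shell
make install-dev      # editable install with development dependencies
make extract-triton   # build the Triton image and extract binaries into pytriton/tritonserver
make lint             # pre-commit and pytype checks (run via tox)
make test             # run the test suite (run via tox)
```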
- -## Feature matrix - -| Feature | Description | -| ------- | ----------- | -| Native Python support | You can create any Python function and expose it as an HTTP/gRPC API. | -| Framework-agnostic | You can run any Python code with any framework of your choice, such as PyTorch, TensorFlow, or JAX. | -| Performance optimization | You can benefit from dynamic batching, response cache, model pipelining, and GPU/CPU inference. | -| Easy installation and setup | You can use a simple and familiar interface based on Flask/FastAPI for easy installation and setup. | -| Model clients | You can access high-level model clients for HTTP/gRPC requests with configurable options and both synchronous and asynchronous API. | -| Streaming (alpha) | You can stream partial responses from a model by serving it in a decoupled mode. | - -## How it works? - -In PyTriton, like in Flask or FastAPI, you can define any Python function that executes a Machine Learning model prediction and exposes -it through an HTTP/gRPC API. PyTriton installs Triton Inference Server in your environment and uses it for handling -HTTP/gRPC requests and responses. Our library provides a Python API that allows you to attach a Python function to Triton -and a communication layer to send/receive data between Triton and the function. The solution enables using the -performance features of Triton Inference Server, such as dynamic batching or response cache, without changing your model -environment. Thus, it improves the performance of running inference on GPU for models implemented in Python. The solution is -framework-agnostic and can be used along with frameworks like PyTorch, TensorFlow, or JAX. - -## Installation - -We assume that you are comfortable with the Python programming language and familiar with Machine Learning models. -Using [Docker](https://www.docker.com/) is an option, but not mandatory. - -The library can be installed in: - -- system environment -- virtualenv -- [Docker](https://www.docker.com/) image - -NVIDIA optimized Docker images for Python frameworks can be obtained from the [NVIDIA NGC Catalog](https://catalog.ngc.nvidia.com/containers). - -If you want to use the Docker runtime, we recommend that you install [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html) to -enable running model inference on NVIDIA GPUs. - -### Prerequisites - -Before installing the library, ensure that you meet the following requirements: - -- An operating system with glibc >= `2.35`. - - Triton Inference Server and PyTriton have **only** been rigorously tested on Ubuntu 22.04. - - Other supported operating systems include Debian 11+, Rocky Linux 9+, and Red Hat Universal Base Image 9+. - - To check your glibc version, run `ldd --version` -- Python version >= `3.8` -- Use `pip >= 20.3` -- Install `libpython3.*.so` in the operating system (appropriate for your Python version). - -### Install from `pypi` - -PyTriton can be installed from [pypi.org](https://pypi.org/project/nvidia-pytriton/) by running the following command: - -```shell -pip install -U nvidia-pytriton -``` - -**Important**: The Triton Inference Server binary is installed as part of the PyTriton package. - -More details about installation can be found in the [documentation](https://triton-inference-server.github.io/pytriton/latest/installation/). - - -### Setting Up Python Environment - -The PyTriton requires installation and linking of `libpython3.*.so`.
Read more in "[Setting Up Python Environment](https://triton-inference-server.github.io/pytriton/latest/installation#setting-up-python-environment)" -for additional information how to configure system for different Python versions. - -### Building binaries from source - -The binary package can be built from the source, allowing access to unreleased hotfixes, the ability to modify the PyTriton code, and compatibility with various Triton Inference Server versions, including custom server builds. -For further information on building the PyTriton binary, refer to the [Building](https://triton-inference-server.github.io/pytriton/latest/building/) page of documentation. - -## Quick Start - -The quick start presents how to run Python model in Triton Inference Server without need to change the current working -environment. In the example we are using a simple `Linear` PyTorch model. - -The requirement for the example is to have installed PyTorch in your environment. You can do it running: - -```shell -pip install torch -``` - -The integration of model requires to provide following elements: - -- The model - framework or Python model or function that handle inference requests -- Inference callback - a lambda or function which handle the input data coming from Triton and return the result -- Python function connection with Triton Inference Server - a binding for communication between Triton and Python - callback - -In the next step define the `Linear` model: - -```python -import torch - -model = torch.nn.Linear(2, 3).to("cuda").eval() -``` - -In the second step, create an inference callable as a function. The function obtains the HTTP/gRPC request data as an argument, which should be in the form of a NumPy array. The expected return object should also be a NumPy array. You can define an inference callable as a function that uses the `@batch` decorator from PyTriton. This decorator converts the input request into a more suitable format that can be directly passed to the model. You can read more about [decorators here](docs/decorators.md). - -Example implementation: - - - -```python -import numpy as np -from pytriton.decorators import batch - - -@batch -def infer_fn(**inputs: np.ndarray): - (input1_batch,) = inputs.values() - input1_batch_tensor = torch.from_numpy(input1_batch).to("cuda") - output1_batch_tensor = model(input1_batch_tensor) # Calling the Python model inference - output1_batch = output1_batch_tensor.cpu().detach().numpy() - return [output1_batch] -``` - -In the next step, you can create the binding between the inference callable and Triton Inference Server using the `bind` method from pyTriton. This method takes the model name, the inference callable, the inputs and outputs tensors, and an optional model configuration object. - - - -```python -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -# Connecting inference callable with Triton Inference Server -with Triton() as triton: - # Load model into Triton Inference Server - triton.bind( - model_name="Linear", - infer_func=infer_fn, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128) - ) - ... -``` - -Finally, serve the model with the Triton Inference Server: - - - -```python -from pytriton.triton import Triton - -with Triton() as triton: - ... 
# Load models here - triton.serve() -``` - -The `bind` method creates a connection between the Triton Inference Server and the `infer_fn`, which handles -the inference queries. The `inputs` and `outputs` describe the model inputs and outputs that are exposed in -Triton. The config field allows more parameters for model deployment. - -The `serve` method is blocking, and at this point, the application waits for incoming HTTP/gRPC requests. From that -moment, the model is available under the name `Linear` in the Triton server. The inference queries can be sent to -`localhost:8000/v2/models/Linear/infer`, which are passed to the `infer_fn` function. - -If you would like to use Triton in the background mode, use `run`. More about that can be found -in the [Deploying Models](https://triton-inference-server.github.io/pytriton/latest/initialization/) page. - -Once the `serve` or `run` method is called on the `Triton` object, the server status can be obtained using: - - - -```shell -curl -v localhost:8000/v2/health/live -``` - -The model is loaded right after the server starts, and its status can be queried using: - - - -```shell -curl -v localhost:8000/v2/models/Linear/ready -``` - -Finally, you can send an inference query to the model: - - - -```shell -curl -X POST \ - -H "Content-Type: application/json" \ - -d @input.json \ - localhost:8000/v2/models/Linear/infer -``` - -The `input.json` with sample query: - -```json -{ - "id": "0", - "inputs": [ - { - "name": "INPUT_1", - "shape": [1, 2], - "datatype": "FP32", - "parameters": {}, - "data": [[-0.04281254857778549, 0.6738349795341492]] - } - ] -} -``` - -Read more about the HTTP/gRPC interface in the Triton Inference Server -[documentation](https://github.com/triton-inference-server/server/blob/main/docs/customization_guide/inference_protocols.md#httprest-and-grpc-protocols). - -You can also validate the deployed model using a simple client that can perform inference requests: - - - -```python -import torch -from pytriton.client import ModelClient - -input1_data = torch.randn(128, 2).cpu().detach().numpy() - -with ModelClient("localhost:8000", "Linear") as client: - result_dict = client.infer_batch(input1_data) - -print(result_dict) -``` - -The full example code can be found in [examples/linear_random_pytorch](examples/linear_random_pytorch). - -You can learn more about client usage in the [Clients](https://triton-inference-server.github.io/pytriton/latest/clients/) document. - -More information about running the server and models can be found -in [Deploying Models](https://triton-inference-server.github.io/pytriton/latest/initialization/) page of documentation. - -## Architecture - -The diagram below presents the schema of how the Python models are served through Triton Inference Server using -PyTriton. The solution consists of two main components: - -- Triton Inference Server: for exposing the HTTP/gRPC API and benefiting from performance features like dynamic batching - or response cache. -- Python Model Environment: your environment where the Python model is executed. - -The Triton Inference Server binaries are provided as part of the PyTriton installation. The Triton Server is -installed in your current environment (system or container). The PyTriton controls the Triton Server process -through the `Triton Controller`. - -Exposing the model through PyTriton requires the definition of an `Inference Callable` - a Python function that is -connected to Triton Inference Server and executes the model or ensemble for predictions. 
The integration layer binds -the `Inference Callable` to Triton Server and exposes it through the Triton HTTP/gRPC API under a provided `<model name>`. Once -the integration is done, the defined `Inference Callable` receives data sent to the HTTP/gRPC API endpoint -`v2/models/<model name>/infer`. Read more about the HTTP/gRPC interface in the Triton Inference Server -[documentation](https://github.com/triton-inference-server/server/blob/main/docs/customization_guide/inference_protocols.md#httprest-and-grpc-protocols). - -The HTTP/gRPC requests sent to `v2/models/<model name>/infer` are handled by Triton -Inference Server. The server batches requests and passes them to the `Proxy Backend`, which sends the batched requests to the appropriate -`Inference Callable`. The data is sent as a `numpy` array. Once the `Inference Callable` finishes execution of -the model prediction, the result is returned to the `Proxy Backend`, and a response is created by Triton Server. - -![High Level Design](docs/assets/hld.svg) - - - - -## Examples - -The [examples](examples) page presents various cases of serving models using PyTriton. You can find simple examples of -running PyTorch, TensorFlow2, JAX, and simple Python models. Additionally, we have prepared more advanced scenarios like online -learning, multi-node models, or deployment on Kubernetes using PyTriton. Each example contains instructions describing -how to build and run the example. Learn more about how to use PyTriton by reviewing our [examples](examples). - -### Streaming (alpha) - -We introduced a new alpha feature to PyTriton that allows streaming partial responses from a model. It is based on the NVIDIA Triton Inference Server decoupled models feature. See the example in [examples/huggingface_dialogpt_streaming_pytorch](examples/huggingface_dialogpt_streaming_pytorch). - -### Profiling model - -The [Perf Analyzer](https://github.com/triton-inference-server/client/blob/main/src/c++/perf_analyzer/README.md) can be -used to profile models served through PyTriton. We have prepared an example of -using the Perf Analyzer to profile the BART PyTorch model. The example code can be found -in [examples/perf_analyzer](examples/perf_analyzer). - -## Version management - -PyTriton follows the [Semantic Versioning](https://semver.org/) scheme for versioning. Official releases can be found on [PyPI](https://pypi.org/project/nvidia-pytriton/) and [GitHub releases](https://github.com/triton-inference-server/pytriton/releases). The most up-to-date development version is available on the `main` branch, which may include hotfixes that have not yet been released through the standard channels. To install the latest development version, refer to the instructions in the -[building binaries from source](#building-binaries-from-source) section. - -## Useful Links - -- [Changelog](CHANGELOG.md) -- [Known Issues](https://triton-inference-server.github.io/pytriton/latest/known_issues) -- [Contributing](CONTRIBUTING.md) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/__init__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/__init__.py deleted file mode 100644 index 293b270fb5cb2a1db13bb53ea9a51f15e21e4205..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 -from importlib.metadata import PackageNotFoundError, version - -try: - __version__ = version("nvidia-pytriton") -except PackageNotFoundError: - # package is not installed - pass - -from pytriton import ( - client, # noqa: F401 - model_config, # noqa: F401 - triton, # noqa: F401 -) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/__main__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/__main__.py deleted file mode 100644 index 6d55feec216447ac76f3ea3c9aac2784516cbc52..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/__main__.py +++ /dev/null @@ -1,218 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Pytriton check module.""" - -import logging -import os -import pathlib -import shutil -import tempfile -from typing import Optional - -import typer -from typing_extensions import Annotated - -from pytriton.check.add_sub import add_sub_example, add_sub_example_thread -from pytriton.check.env_checks import env_checks - -warning_message = """ -+---------------------------------------------------------------+ -| WARNING | -+---------------------------------------------------------------+ -| Command may collect sensitive information, please review the | -| log and the ZIP before sharing. | -+---------------------------------------------------------------+ -""" - - -app = typer.Typer(help="Pytriton check tool.\n\nThis tool is used to check the environment and run examples.") - - -class CheckEnvironment: - """Check environment class. 
- - Args: - workspace_path: Path to workspace - name: Name of the sub_workspace - zip_results: Flag if results should be zipped - check_workspace_exist: Flag if workspace should be checked if exists - """ - - def __init__( - self, - workspace_path: Optional[pathlib.Path], - name: str, - zip_results: bool = True, - check_workspace_exist: bool = True, - ): - """Initialize class.""" - self.name = name - self._zip_results = zip_results - self._temp_workspace = None - - self.logger = logging.getLogger(name) - if check_workspace_exist and workspace_path is not None and workspace_path.exists(): - self.logger.error(f"Workspace path {workspace_path} already exists") - raise typer.Exit(code=1) - if workspace_path is None: - self._temp_workspace = tempfile.TemporaryDirectory(prefix="pytriton_workspace_") - workspace_path = pathlib.Path(self._temp_workspace.name) - else: - workspace_path.mkdir(parents=True, exist_ok=True) - logging.basicConfig(level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - self.logger.addHandler(logging.FileHandler(workspace_path / (name + "_log.txt"))) - self.workspace_path = workspace_path - self.sub_workspace = workspace_path / name - - def __enter__(self): - """Enter method.""" - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """Exit method zips results if required.""" - self.zip_results() - - def zip_results(self): - """Zip results.""" - if self._zip_results: - if self.workspace_path.exists(): - if self._temp_workspace is not None: - output_file_base = pathlib.Path(os.getcwd()) / self.workspace_path.name - else: - output_file_base = self.workspace_path - self.logger.info(f"Zipping {self.workspace_path} to {output_file_base}.zip") - shutil.make_archive(str(output_file_base.resolve()), "zip", str(self.workspace_path.resolve())) - else: - self.logger.error(f"Workspace path {self.workspace_path} does not exist") - - -@app.command("example-add-sub-script") -def example_add_sub_script( - workspace: Annotated[Optional[pathlib.Path], typer.Option("--workspace", "-w")] = None, - zip_results: Annotated[bool, typer.Option("--zip")] = True, -): - """Run example using external script. - - Args: - workspace: Workspace path that will be created to store testing output (should not exist) - zip_results: flag if output should be zipped - """ - with CheckEnvironment(workspace, "example_add_sub_script", zip_results) as ce: - try: - add_sub_example_thread(ce.sub_workspace, ce.logger) - except Exception as e: - ce.logger.error(f"Error occurred in command: {e}") - - -@app.command("example-add-sub") -def example_add_sub( - workspace: Annotated[Optional[pathlib.Path], typer.Option("--workspace", "-w")] = None, - zip_results: Annotated[bool, typer.Option("--zip")] = True, -): - """Run example. - - Args: - workspace: Workspace path that will be created to store testing output (should not exist) - zip_results: flag if output should be zipped - """ - with CheckEnvironment(workspace, "example_add_sub", zip_results) as ce: - try: - add_sub_example(ce.sub_workspace, ce.logger) - except Exception as e: - ce.logger.error(f"Error occurred in command: {e}") - - -@app.command("examples") -def examples( - workspace: Annotated[Optional[pathlib.Path], typer.Option("--workspace", "-w")] = None, - zip_results: Annotated[bool, typer.Option("--zip")] = True, -): - """Run example in the same process. 
- - Args: - workspace: Workspace path that will be created to store testing output (should not exist) - zip_results: flag if output should be zipped - """ - with CheckEnvironment(workspace, "example_add_sub", zip_results) as ce: - try: - add_sub_example(ce.sub_workspace, ce.logger) - except Exception as e: - ce.logger.error(f"Error occurred in command: {e}") - - with CheckEnvironment(workspace, "example_add_sub_script", zip_results, check_workspace_exist=False) as ce: - try: - add_sub_example_thread(ce.sub_workspace, ce.logger) - except Exception as e: - ce.logger.error(f"Error occurred in command: {e}") - - -@app.command("env") -def env_check( - workspace: Annotated[Optional[pathlib.Path], typer.Option("--workspace", "-w")] = None, - zip_results: Annotated[bool, typer.Option("--zip")] = True, -): - """Run all environment checks. - - It may collect sensitive system information in the log. Please review the log before sharing. - - Args: - workspace: Workspace path that will be created to store testing output (should not exist) - zip_results: flag if output should be zipped - """ - with CheckEnvironment(workspace, "env_checks", zip_results) as ce: - try: - env_checks(ce.logger) - except Exception as e: - ce.logger.error(f"Error occurred in command: {e}") - - -@app.command("check") -def check( - workspace: Annotated[Optional[pathlib.Path], typer.Option("--workspace", "-w")] = None, - zip_results: Annotated[bool, typer.Option("--zip")] = True, -): - """Run all checks. - - Args: - workspace: Workspace path that will be created to store testing output (should not exist) - zip_results: flag if output should be zipped - """ - with CheckEnvironment(workspace, "all_checks", zip_results) as ce: - try: - ce.logger.info("Running all common checks") - env_check(ce.workspace_path / "env", False) - examples(ce.workspace_path / "examples", False) - except Exception as e: - ce.logger.error(f"Error occurred in command: {e}") - - -@app.callback(invoke_without_command=True) -def default_command(ctx: typer.Context): - """Default command.""" - if ctx.invoked_subcommand is None: - check() - - -def main(): - """Main function.""" - logger = logging.getLogger("PyTriton-Check") - try: - logger.warning(warning_message) - app() - finally: - logger.warning(warning_message) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/__init__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/__init__.py deleted file mode 100644 index ae1da8a23f7197b0b0598b59e013b4946e7d6e9e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/add_sub.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/add_sub.py deleted file mode 100644 index 7c2a7408f5e4c6632476a8e923858feb0e5d3cbb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/add_sub.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Add_sub example model for checking corectness of triton environment.""" - -import argparse -import logging -import pathlib -import signal -import sys - -import numpy as np - -from pytriton.check.utils import ScriptThread -from pytriton.client import ModelClient -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("check.add_sub_example") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") -add_script_path = [sys.executable, "pytriton/check/add_sub.py"] - - -@batch -def _add_sub(**inputs): - a_batch, b_batch = inputs.values() - add_batch = a_batch + b_batch - sub_batch = a_batch - b_batch - return {"add": add_batch, "sub": sub_batch} - - -def prepare_triton(workspace: pathlib.Path): - """Prepare triton server with AddSub model.""" - triton = Triton(workspace=str(workspace.resolve())) - triton.run() - logger.info("Loading AddSub model") - triton.bind( - model_name="AddSub", - infer_func=_add_sub, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="add", dtype=np.float32, shape=(-1,)), - Tensor(name="sub", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - strict=True, - ) - return triton - - -def infer_add_sub_model(): - """Infer AddSub model.""" - batch_size = 2 - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - logger.info(f"a: {a_batch.tolist()}") - logger.info(f"b: {b_batch.tolist()}") - - with ModelClient("localhost", "AddSub") as client: - logger.info("Sending inference request") - result_batch = client.infer_batch(a_batch, b_batch) - - for output_name, data_batch in result_batch.items(): - logger.info(f"{output_name}: {data_batch.tolist()}") - - -def serve_triton(workspace: pathlib.Path): - """Serve triton server with AddSub model.""" - triton = prepare_triton(workspace) - logger.info("Serving AddSub model") - triton.serve() - - -def add_sub_example_thread(workspace: pathlib.Path, logger: logging.Logger): - """Run example using external script. 
- - Args: - workspace: Workspace path that will be created to store testing output (should not exist) - logger: logger instance - """ - logger.info("Running example model using external script") - - with ScriptThread(add_script_path + ["--workspace", str(workspace.resolve())], name="server") as server_thread: - import time - - time.sleep(3) - infer_add_sub_model() - - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - server_thread.join() - logger.error(server_thread.output) - if server_thread.returncode not in [ - 0, - -2, - ]: - logger.error(f"Server failed - return code {server_thread.returncode}") - - -def add_sub_example(workspace: pathlib.Path, logger: logging.Logger): - """Run example in the same process. - - Args: - workspace: Workspace path that will be created to store testing output (should not exist) - logger: logger instance - """ - logger.info("Running example model") - triton = prepare_triton(workspace) - infer_add_sub_model() - triton.stop() - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("--workspace", help="Workspace path", type=str) - parser.add_argument("--infer", default=False, help="Infer AddSub model", action="store_true") - args = parser.parse_args() - - if args.infer: - infer_add_sub_model() - else: - serve_triton(pathlib.Path(args.workspace)) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/env_checks.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/env_checks.py deleted file mode 100644 index 86a8a66c338ff7914f3a42ea3d95c869eb454ff5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/env_checks.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Environment checks.""" - -import logging -import os -import pathlib -import platform -import re -import sys - -import psutil - -from pytriton.check.utils import ScriptThread - - -def nvidia_smi(logger): - """Run nvidia-smi. - - Args: - logger: logger instance - """ - logger.info("Running nvidia-smi") - with ScriptThread(["nvidia-smi"], name="nvidia-smi") as nvidia_smi_thread: - nvidia_smi_thread.join() - logger.info(nvidia_smi_thread.output) - if nvidia_smi_thread.returncode != 0: - logger.error("nvidia-smi failed - possible cause: no GPU available or driver not installed") - logger.error( - "If running in WSL wit sudo, make sure to add nvidia-smi folder (e.g. /usr/lib/wsl/lib) to sudoers file!" - ) - - -def get_platform_info(logger): - """Get platform information (OS, python, etc.). 
- - Args: - logger: logger instance - """ - logger.info("Checking OS version") - logger.info("Script is running in docker:" + str(pathlib.Path("/.dockerenv").exists())) - - os_release_path = pathlib.Path("/etc/os-release") - if os_release_path.exists(): - with os_release_path.open() as f: - os_release = f.read() - logger.info("OS release") - logger.info(os_release) - for line in os_release.split("\n"): - if "PRETTY_NAME" in line: - os_version = line.split("=")[1].strip() - logger.info(f"OS version: {os_version}") - else: - logger.warning("OS release file not found (not available on some systems)") - - logger.info("Get platform info") - logger.info(f"Platform: {platform.platform()}") - logger.info(f"System: {platform.system()}") - logger.info(f"Release: {platform.release()}") - logger.info(f"Version: {platform.version()}") - logger.info(f"Machine: {platform.machine()}") - logger.info(f"Processor: {platform.processor()}") - logger.info(f"Python version: {platform.python_version()}") - logger.info(f"Python implementation: {platform.python_implementation()}") - logger.info(f"Python compiler: {platform.python_compiler()}") - logger.info(f"Python build: {platform.python_build()}") - logger.info(f"libc_ver: {platform.libc_ver()}") - - -def check_psutil_stats(logger): - """Check psutil stats. - - Args: - logger: logger instance - """ - logger.info("Checking psutil stats") - logger.info("Memory stats") - logger.info(psutil.virtual_memory()) - logger.info("Swap stats") - logger.info(psutil.swap_memory()) - logger.info("Disk stats") - logger.info(psutil.disk_usage("/")) - logger.info("Disk io counters") - logger.info(psutil.disk_io_counters()) - logger.info("CPU stats") - logger.info(psutil.cpu_times()) - logger.info("Network stats") - logger.info(psutil.net_io_counters()) - - -def get_listening_processes(logger): - """Get listening processes. - - Args: - logger: logger instance - """ - logger.info("Listening processes") - processes = {proc.pid: proc.info["name"] for proc in psutil.process_iter(["pid", "name"])} - connections = psutil.net_connections() - listening_sockets = [conn for conn in connections if conn.status == "LISTEN"] - - for listening_socket in listening_sockets: - process_name = None - if listening_socket.pid is not None and listening_socket.pid in processes: - process_name = processes[listening_socket.pid] - logger.info( - f"Process ID: {listening_socket.pid}, Name: {process_name}, Local Address: {listening_socket.laddr}, Remote Address: {listening_socket.raddr}, Status: {listening_socket.status}" - ) - - -def installed_packages(logger): - """Get installed packages. - - Args: - logger: logger instance - """ - logger.info("Checking installed packages") - import importlib_metadata - - packages = importlib_metadata.distributions() - - installed_pkg = sorted([f"{package.metadata['Name']}=={package.version} ({package._path})" for package in packages]) - installed_pkg_str = "\n[\n\t" + ",\n\t".join(installed_pkg) + "\n]" - logger.info(installed_pkg_str) - - -def check_compiler_and_clib(logger): - """Check compiler and C libraries.
- - Args: - logger: logger instance - """ - logger.info("Checking compiler and C libraries") - with ScriptThread(["gcc", "--version"], name="gcc_version") as gcc_version_thread: - gcc_version_thread.join() - logger.info("GCC version:") - logger.info(gcc_version_thread.output) - if gcc_version_thread.returncode != 0: - logger.error("gcc failed") - - logger.info("Python version:") - logger.info(sys.version) - - try: - logger.info(os.confstr("CS_GNU_LIBC_VERSION")) - except AttributeError as e: - logger.error(f"Failed to get glibc version {e}") - - -def log_env_variables(logger): - """Log environment variables. - - Args: - logger: logger instance - """ - logger.info("Environment variables") - - env_vars = os.environ.items() - blacklist_patterns = [ - r".*token.*", - r".*secret.*", - r".*key.*", - r".*password.*", - ] - - patterns = [re.compile(pattern, re.IGNORECASE) for pattern in blacklist_patterns] - filtered_env_vars = [ - f"{key}={value}" - for key, value in env_vars - if not any(pattern.search(key) or pattern.search(value) for pattern in patterns) - ] - - env_vars_str = "\n".join(filtered_env_vars) - logger.info(env_vars_str) - - -def env_checks(logger: logging.Logger): - """Run all environment checks. - - Args: - logger: logger instance - """ - logger.info("Running all environment checks") - get_platform_info(logger) - nvidia_smi(logger) - installed_packages(logger) - check_psutil_stats(logger) - get_listening_processes(logger) - check_compiler_and_clib(logger) - log_env_variables(logger) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/utils.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/utils.py deleted file mode 100644 index 21fd1d5efe0f79bbb8a884053f76b9ee13deead8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/check/utils.py +++ /dev/null @@ -1,555 +0,0 @@ -# Copyright (c) 2022-2024, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Utils.""" - -import contextlib -import fcntl -import logging -import os -import pathlib -import re -import select -import socket -import subprocess -import threading -import typing - -LOGGER = logging.getLogger(__name__) -DEFAULT_LOG_FORMAT = "%(asctime)s - %(levelname)8s - %(process)8d - %(threadName)s - %(name)s: %(message)s" - - -def _read_outputs(_process, _logger, _outputs): - # Set stdout and stderr file descriptors to non-blocking mode - try: - fcntl.fcntl(_process.stdout, fcntl.F_SETFL, os.O_NONBLOCK) - fcntl.fcntl(_process.stderr, fcntl.F_SETFL, os.O_NONBLOCK) - except ValueError: # when selecting on closed files - return - - buffers = {_process.stdout: "", _process.stderr: ""} - rds = [_process.stdout, _process.stderr] - while rds: - try: - readable, _, _ = select.select(rds, [], [], 1) - except ValueError: # when selecting on closed files - break - - for rd in readable: - try: - data = os.read(rd.fileno(), 4096) - if not data: - rds.remove(rd) - continue - - decoded_data = data.decode("utf-8") - buffers[rd] += decoded_data - lines = buffers[rd].splitlines(keepends=True) - - if buffers[rd].endswith("\n"): - complete_lines = lines - buffers[rd] = "" - else: - complete_lines = lines[:-1] - buffers[rd] = lines[-1] - - for line in complete_lines: - line = line.rstrip() - _logger.info(line) - _outputs.append(line) - except OSError: # Reading from an empty non-blocking file - pass - - -class ScriptThread(threading.Thread): - """A class that runs external script in a separate thread.""" - - def __init__(self, cmd, workdir=None, group=None, target=None, name=None, args=(), kwargs=None) -> None: - """Initializes the ScriptThread object.""" - super().__init__(group, target, name, args, kwargs, daemon=True) - self.cmd = cmd - self.workdir = workdir - self._process_spawned_or_spawn_error_flag = None - self.active = False - self._process = None - self.returncode = None - self._output = [] - self._logger = logging.getLogger(self.name) - - def __enter__(self): - """Starts the script thread.""" - self.start(threading.Event()) - self._process_spawned_or_spawn_error_flag.wait() - return self - - def __exit__(self, *args): - """Stops the script thread and waits for it to join.""" - self.stop() - self.join() - self._process_spawned_or_spawn_error_flag = None - - def start(self, flag: typing.Optional[threading.Event] = None) -> None: - """Starts the script thread.""" - if flag is None: - flag = threading.Event() - self._logger.info(f"Starting {self.name} script with \"{' '.join(self.cmd)}\" cmd") - self._process_spawned_or_spawn_error_flag = flag - super().start() - - def stop(self): - """Sets the active flag to False to stop the script thread.""" - self._logger.info(f"Stopping {self.name} script") - self.active = False - - def run(self): - """Runs the script in a separate process.""" - import psutil - - self.returncode = None - self._output = [] - self._process = None - - os.environ.setdefault("PYTHONUNBUFFERED", "1") # to not buffer logs - try: - with psutil.Popen( - self.cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=0, cwd=self.workdir - ) as process: - self._process = process - self.active = True - if self._process_spawned_or_spawn_error_flag: - self._process_spawned_or_spawn_error_flag.set() - while self.active and process.poll() is None and process.returncode is None: - try: - _read_outputs(process, self._logger, self._output) - except KeyboardInterrupt: - self.stop() - - finally: - if self._process_spawned_or_spawn_error_flag: - 
self._process_spawned_or_spawn_error_flag.set() - if self.process: - while self.process.poll() is None: - _read_outputs(self.process, self._logger, self._output) - _read_outputs(self.process, self._logger, self._output) - self.returncode = process.wait() # pytype: disable=name-error - self._logger.info(f"{self.name} process finished with {self.returncode}") - - self.active = False - self._process = None - - @property - def output(self): - """Return process stream output.""" - return "\n".join(self._output) - - @property - def process(self): - """Return process object.""" - return self._process - - -def find_free_port() -> int: - """Finds a free port on the local machine.""" - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: - s.bind(("", 0)) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - return s.getsockname()[1] - - -class ProcessMonitoring: - """A class that dumps the state of a process and its children. - - This class uses the py-spy tool to dump the stack trace of a process and its - children recursively. It also dumps the process information such as the parent - and the command line. It allows registering custom monitors that can perform - additional actions on the process. - - Attributes: - _logger (logging.Logger): The logger object to write messages. - _process (psutil.Process): The process object to monitor. - _children_processes (list[psutil.Process]): The list of child processes to monitor. - _log (logging.Logger.method): The logging method to use for messages. - _remove_color (bool): Whether to remove ANSI escape sequences from the output. - _ansi_escape (re.Pattern): The regular expression object to match ANSI escape sequences. - _custom_monitors (list[typing.Callable[[int], None]]): The list of custom monitor functions to execute on each dump cycle. - """ - - def __init__( - self, - pid: int, - logger: typing.Optional[logging.Logger] = None, - loglevel: int = logging.INFO, - remove_color: bool = False, - ): - """Initializes the ProcessMonitoring object. - - Args: - pid (int): The process ID of the process to monitor. - logger (typing.Optional[logging.Logger], optional): The logger object to write messages. Defaults to None. - loglevel (int, optional): The logging level to use for messages. Defaults to logging.INFO. - remove_color (bool, optional): Whether to remove ANSI escape sequences from the output. Defaults to False. - """ - import re - - import psutil - - self._logger = logger or logging.getLogger("monitoring") - self._process = psutil.Process(pid) - self._children_processes = list(self._process.children(recursive=True)) - self._log = { - logging.DEBUG: self._logger.debug, - logging.INFO: self._logger.info, - logging.WARNING: self._logger.warning, - logging.ERROR: self._logger.error, - }[loglevel] - self._log(f"Initial list of children processes: {self._children_processes}") - self._remove_color = remove_color - pattern = r"\x1b\[.*?m" - self._ansi_escape = re.compile(pattern) - self._custom_monitors = [] - - def register_custom_monitor(self, custom_monitor: typing.Callable[[int], None]) -> None: - """Registers a custom monitor for the process. - - This method adds a custom monitor function to the list of monitors that are - executed on each dump cycle. A custom monitor function should take an integer - as an argument (the process ID) and return None. - - Args: - custom_monitor (typing.Callable[[int], None]): The custom monitor function to register. 
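`ScriptThread` above combines `threading`, `psutil.Popen`, and non-blocking `select`/`fcntl` reads to stream a command's output while it runs. A much-reduced sketch of the same run-a-command-in-a-background-thread idea (illustrative; it uses simple blocking line reads instead of the select/fcntl machinery and is not the deleted class):

```python
import subprocess
import sys
import threading


class CommandThread(threading.Thread):
    """Run a command in a background thread and collect its output lines."""

    def __init__(self, cmd):
        super().__init__(daemon=True)
        self.cmd = cmd
        self.lines = []
        self.returncode = None

    def run(self):
        with subprocess.Popen(
            self.cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
        ) as process:
            for line in process.stdout:  # blocking, line-buffered read
                self.lines.append(line.rstrip())
            self.returncode = process.wait()


thread = CommandThread([sys.executable, "--version"])
thread.start()
thread.join()
print(thread.returncode, "\n".join(thread.lines))
```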
- """ - self._custom_monitors.append(custom_monitor) - - def dump_state(self) -> None: - """Dumps the state of the process and its children. - - This method calls the _dump_processes_stacktrace and _dump_child_processes - methods to dump the stack trace and the process information of the process - and its children recursively. - """ - self._dump_processes_stacktrace() - self._dump_child_processes() - - def _dump_processes_stacktrace(self): - import psutil - import sh - - self._log("==== Dump process stacktrace") - pyspy_cmd = sh.Command("py-spy") - - for process in [self._process] + self.children: - try: - result = pyspy_cmd("dump", "-ll", "--nonblocking", "-p", str(process.pid)) - if self._remove_color: - result = self._ansi_escape.sub("", str(result)) - self._log(f"Dump stack trace for process (pid={process.pid}) with cmd {process.cmdline()}") - for custom_monitor in self._custom_monitors: - custom_monitor(process.pid) - self._log(result) - except psutil.NoSuchProcess as e: - self._log(f"Error during handling process: {e}") - except sh.ErrorReturnCode_1 as e: - self._log(f"Error during calling py-spy process: {e}") - - def _dump_child_processes(self): - import psutil - - self._log("==== Dump process info (with its children)") - for process in [self._process] + self.children: - try: - self._log(f"{process} parent={process.parent()} ") - except psutil.NoSuchProcess: - self._log(f"{process} is missing in process table") - - @property - def children(self): - """Returns the list of child processes to monitor. - - This property returns the list of child processes to monitor, and updates it - with any new children that are created by the process. - - Returns: - list[psutil.Process]: The list of child processes to monitor. - """ - import psutil - - try: - children = list(self._process.children(recursive=True)) - self._children_processes = list(set(self._children_processes + children)) - except psutil.NoSuchProcess: - pass - return self._children_processes - - -def get_current_container_version(): - """Returns the version of the current container.""" - container_version = os.environ.get("NVIDIA_PYTORCH_VERSION") or os.environ.get("NVIDIA_TENSORFLOW_VERSION") - if container_version and "-" in container_version: - container_version = container_version.split("-")[0] # TF version has format - - return container_version - - -def verify_docker_image_in_readme_same_as_tested(readme_path, image_name_with_version): - """Verify that the docker image is the same as described in the readme file.""" - image_name, _image_version = image_name_with_version.split(":") - framework_name = image_name.split("/")[-1] - readme_payload = pathlib.Path(readme_path).read_text() - match_iterator = re.finditer( - rf"(?P[\w/.\-:]+)/{framework_name}:(?P[\w.-]+)", - readme_payload, - ) - for entry in match_iterator: - assert entry.group() == image_name_with_version, f"{entry.group()} != {image_name_with_version}" - - -def search_warning_on_too_verbose_log_level(logs: str): - """Search warnings.""" - pattern = r"Triton Inference Server is running with enabled verbose logs.*It may affect inference performance." - return re.search(pattern, logs) - - -class ProcessMonitoringThread: - """A class that creates a thread to monitor a process. - - This class uses the ProcessMonitoring class to dump the state of a process - and its children periodically. It also allows registering custom monitors - that can perform additional actions on the process. 
- - Attributes: - _monitoring (ProcessMonitoring): The ProcessMonitoring object that handles the dumping logic. - _stop_event (threading.Event): The event object that signals the thread to stop its loop. - _thread (threading.Thread): The thread object that runs the _run method in a loop. - _interval (float): The interval in seconds between each dump cycle. - """ - - def __init__(self, monitoring: ProcessMonitoring, interval: float = 60): - """Initializes the ProcessMonitoringThread object. - - Args: - monitoring (ProcessMonitoring): The ProcessMonitoring object that handles the dumping logic. - interval (float, optional): The interval in seconds between each dump cycle. Defaults to 60. - """ - self._monitoring = monitoring - self._interval = interval - - def start(self) -> None: - """Starts the monitoring thread. - - This method creates a new thread that runs the _run method in a loop until - the stop method is called or an exception occurs. It also sets the stop event - object that can be used to signal the thread to stop gracefully. - """ - self._stop_event = threading.Event() - self._thread = threading.Thread(target=self._run, daemon=True) - self._thread.start() - - def stop(self) -> None: - """Stops the monitoring thread. - - This method sets the stop event object that signals the thread to stop its loop. - It also waits for the thread to join before returning. - """ - self._stop_event.set() - self._thread.join() - - def __enter__(self): - """Enters the context manager for the monitoring thread.""" - self.start() - return self - - def __exit__(self, *args): - """Exits the context manager for the monitoring thread.""" - self.stop() - - def _run(self): - logging.info("Monitoring process") - self._monitoring.dump_state() - while not self._stop_event.wait(self._interval): - logging.info("Monitoring process") - self._monitoring.dump_state() - - -class TestMonitoringContext: - """A context manager that monitors test processes. - - This context manager creates threads to monitor the test processes and dumps - their state periodically. It can extend argparse args with additional arguments. - It supports splitting log into different files. The standard output log can have one level - and the file log can have another level. It uses log rotation. - """ - - @staticmethod - def extend_args(parser): - """Extends argparse args with additional arguments.""" - parser.add_argument( - "--verbose", - action="store_true", - help="Provide verbose logs", - ) - parser.add_argument( - "--log-path", - type=str, - default=None, - help="Provide the path of external log for rotation", - ) - parser.add_argument( - "--compress-logs", - action="store_true", - help="Enable logs compression", - ) - parser.add_argument( - "--maximum-log-file", - type=int, - default=10 * 1024 * 1024, - help="Maximum logfile size before rotation is started", - required=False, - ) - parser.add_argument( - "--enable-fault-handler", - action="store_true", - help="Enable faulthandler", - ) - parser.add_argument( - "--faulthandler-interval", - type=float, - default=None, - help="Enable faulthandler after specified number of seconds with repeat", - required=False, - ) - parser.add_argument( - "--process-monitoring-interval", - type=float, - default=None, - help="Enable process monitoring after specified number of seconds with repeat", - required=False, - ) - - def __init__(self, args): - """Initializes the TestMonitoringContext object. - - Args: - args (argparse.Namespace): The argparse args object to extend with additional arguments. 
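The `--log-path`, `--maximum-log-file`, and `--compress-logs` options above feed a `RotatingFileHandler` whose rotated files are gzip-compressed. A self-contained sketch of that rotation-with-compression pattern, using the standard `namer`/`rotator` hooks that the context manager below also relies on (file name and sizes here are arbitrary):

```python
import gzip
import logging
import logging.handlers
import os

handler = logging.handlers.RotatingFileHandler("example.log", maxBytes=1024, backupCount=5)
handler.namer = lambda name: name + ".gz"  # rotated files get a .gz suffix


def gzip_rotator(source, dest):
    # Compress the rolled-over file and remove the uncompressed original.
    with open(source, "rb") as f_in, gzip.open(dest, "wb") as f_out:
        f_out.writelines(f_in)
    os.remove(source)


handler.rotator = gzip_rotator

logger = logging.getLogger("rotating_example")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

for i in range(200):
    logger.info("message %05d with some padding to force rotation", i)
```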
- """ - self._args = args - - def __enter__(self): - """Enters the context manager for the test monitoring.""" - import faulthandler - import logging.handlers - - args = self._args - self._loglevel = log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logger = logging.getLogger() - - if args.log_path is not None: - # Create a rotating file handler for the file output logger - # The file name is based on the log path argument, the maximum size is 10 MB, and the maximum number of files is 500 - file_handler = logging.handlers.RotatingFileHandler( - args.log_path, maxBytes=args.maximum_log_file, backupCount=500 - ) - file_handler.setFormatter(logging.Formatter(DEFAULT_LOG_FORMAT)) - file_handler.setLevel(logging.DEBUG) - if args.compress_logs: - file_handler.namer = lambda name: name + ".gz" - - def gzip_rotation(source, dest): - import gzip - import os - - with open(source, "rb") as f_in: - with gzip.open(dest, "wb") as f_out: - f_out.writelines(f_in) - os.remove(source) - - file_handler.rotator = gzip_rotation - - # Add the file handler to the default logger - logger.addHandler(file_handler) - # Get the stream handler that was created by basicConfig - - # Get the stream handler that was created by basicConfig - stream_handler = logger.handlers[0] - # Set the stream handler's level to match the log level argument - stream_handler.setLevel(log_level) - - if args.enable_fault_handler: - faulthandler.enable() - - if args.faulthandler_interval is not None: - faulthandler.dump_traceback_later(args.faulthandler_interval, repeat=True, exit=False) - - custom_monitors = [] - - import os - - import psutil - - def monitor_ram_usage(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR RAM USAGE ({pid}): {process.memory_info()}") - - custom_monitors.append(monitor_ram_usage) - - def monitor_file_descriptors(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR FILE DESCRIPTORS ({pid}): {process.num_fds()}") - - custom_monitors.append(monitor_file_descriptors) - - def monitor_cpu_usage(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR CPU USAGE ({pid}): {process.cpu_percent()}") - - custom_monitors.append(monitor_cpu_usage) - - def monitor_threads(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR THREADS ({pid}): {process.num_threads()}") - - custom_monitors.append(monitor_threads) - - def monitor_process_dict(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR PROCESS DICT ({pid}): {process.as_dict()}") - - custom_monitors.append(monitor_process_dict) - if args.process_monitoring_interval is not None: - monitoring = ProcessMonitoring(os.getpid(), logger, loglevel=logging.DEBUG, remove_color=True) - for monitor in custom_monitors: - monitoring.register_custom_monitor(monitor) - - self._monitor = ProcessMonitoringThread(monitoring, interval=args.process_monitoring_interval) - self._monitor.start() - return self - - def __exit__(self, *args): - """Stops the monitor thread.""" - if hasattr(self, "_monitor"): - self._monitor.stop() - self._monitor = None diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/__init__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/__init__.py deleted file mode 100644 index 
e8638c92e5dd010664bf8b962b3fbc05edd0a379..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 - -from .client import ( - AsyncioDecoupledModelClient, # noqa: F401 - AsyncioModelClient, # noqa: F401 - DecoupledModelClient, # noqa: F401 - FuturesModelClient, # noqa: F401 - ModelClient, # noqa: F401 -) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/asyncio_utils.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/asyncio_utils.py deleted file mode 100644 index 54ee603f769b78091a9457cf98add7c60de48436..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/asyncio_utils.py +++ /dev/null @@ -1,308 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Utility module supporting model clients.""" - -import asyncio -import logging -import time -from typing import Optional, Union - -import aiohttp -import grpc -import tritonclient.grpc -import tritonclient.http - -from pytriton.client.exceptions import PyTritonClientModelUnavailableError, PyTritonClientTimeoutError -from pytriton.client.utils import LATEST_MODEL_VERSION, ModelState, parse_grpc_response, parse_http_response -from pytriton.model_config.parser import ModelConfigParser - -aio_clients = Union[tritonclient.grpc.aio.InferenceServerClient, tritonclient.http.aio.InferenceServerClient] - -_LOGGER = logging.getLogger(__name__) - -_DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S = 60.0 # 60 seconds -_DEFAULT_ASYNC_SLEEP_FACTOR_S = 0.1 # 10% of timeout - - -async def asyncio_get_model_state( - client: aio_clients, - model_name: str, - model_version: Optional[str] = None, -) -> ModelState: - """Obtains state of the model deployed in Triton Inference Server. - - Typical use: - - >>> import tritonclient.http.aio - ... client = tritonclient.http.aio.InferenceServerClient("localhost:8000") - ... model_state = await get_model_state(client, "MyModel", "1") - - Args: - client: Triton Inference Server client to use for communication - model_name: name of the model which state we're requesting. - model_version: - version of the model which state we're requesting. - If model_version is None state of latest model is returned. - The latest versions of the model are the numerically greatest version numbers. 
- - Returns: - Model state. ModelState.UNAVAILABLE is returned in case if model with given name and version is not found. - - """ - _LOGGER.debug(f"Obtaining model {model_name} state") - repository_index = await client.get_model_repository_index() - _LOGGER.debug("Model repository index obtained") - if isinstance(repository_index, list): - models_states = parse_http_response(models=repository_index) - else: - models_states = parse_grpc_response(models=repository_index.models) - - if model_version is None: - requested_model_states = { - version: state for (name, version), state in models_states.items() if name == model_name - } - if not requested_model_states: - return ModelState.UNAVAILABLE - else: - requested_model_states = sorted(requested_model_states.items(), key=lambda item: int(item[0])) - latest_version, latest_version_state = requested_model_states[-1] - _LOGGER.debug(f"Model {model_name} latest version: {latest_version} state: {latest_version_state}") - return latest_version_state - else: - key = (model_name, model_version) - if key not in models_states: - return ModelState.UNAVAILABLE - else: - model_state = models_states[key] - _LOGGER.debug(f"Model {model_name} version {model_version} state: {model_state}") - return model_state - - -async def asyncio_get_model_config( - client: aio_clients, - model_name: str, - model_version: Optional[str] = None, - timeout_s: float = _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S, -): - """Obtain configuration of model deployed on the Triton Inference Server. - - Function waits for server readiness. - - Args: - client: Triton Inference Server client to use for communication - model_name: name of the model which configuration we're requesting. - model_version: - version of the model which configuration we're requesting. - If model_version is None configuration of the latest model is returned. - The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to finish model configuration obtain. - - Returns: - Configuration of requested model. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. 
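Both response parsers above yield a `{(model_name, model_version): state}` mapping, and "latest" means the numerically greatest version string. A tiny sketch of that selection step (plain Python with hypothetical data; the real code uses `ModelState` enum values rather than strings):

```python
# Hypothetical repository-index states keyed by (model_name, model_version).
models_states = {
    ("MyModel", "1"): "READY",
    ("MyModel", "10"): "LOADING",
    ("MyModel", "2"): "READY",
    ("Other", "1"): "READY",
}

requested = {version: state for (name, version), state in models_states.items() if name == "MyModel"}
# Sort by the numeric value of the version, not lexicographically ("10" > "2").
latest_version, latest_state = sorted(requested.items(), key=lambda item: int(item[0]))[-1]
print(latest_version, latest_state)  # -> 10 LOADING
```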
- """ - should_finish_before = time.time() + timeout_s - _LOGGER.debug(f"Obtaining model {model_name} config (timeout={timeout_s:0.2f})") - try: - _LOGGER.debug(f"Waiting for model {model_name} to be ready") - await asyncio.wait_for( - asyncio_wait_for_model_ready( - client, model_name=model_name, model_version=model_version, timeout_s=timeout_s - ), - timeout_s, - ) - - model_version = model_version or "" - - timeout_s = max(0, should_finish_before - time.time()) - if isinstance(client, tritonclient.grpc.aio.InferenceServerClient): - _LOGGER.debug(f"Obtaining model {model_name} config as_json=True") - response = await asyncio.wait_for( - client.get_model_config(model_name, model_version, as_json=True), timeout_s - ) - model_config = response["config"] - else: - _LOGGER.debug(f"Obtaining model {model_name} config") - model_config = await asyncio.wait_for(client.get_model_config(model_name, model_version), timeout_s) - _LOGGER.debug("Model config obtained") - model_config = ModelConfigParser.from_dict(model_config) - _LOGGER.debug(f"Model config: {model_config}") - return model_config - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {model_name} config (timeout={timeout_s:0.2f})" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - - -async def asyncio_wait_for_server_ready( - asyncio_client: aio_clients, - sleep_time_s: float, -): - """Wait for Triton Inference Server readiness. - - There are two functions, which check server status: - * asyncio_client.is_server_ready() - * asyncio_client.is_server_live() - Both must return true to consider server accessible to read model status. - - Function contains while loop with sleep to check server status periodically. - - Args: - asyncio_client: Triton Inference Server client to use for communication - sleep_time_s: time to sleep between server status checks - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - """ - _LOGGER.debug("Waiting for server to be ready") - try: - while True: - try: - _LOGGER.debug("Waiting for server to be ready") - server_ready = await asyncio_client.is_server_ready() - _LOGGER.debug("Waiting for server to be live") - server_live = await asyncio_client.is_server_live() - except tritonclient.utils.InferenceServerException: - # Raised by tritonclient/grpc/__init__.py:75 - server_live = False - server_ready = False - except aiohttp.client_exceptions.ClientConnectorError: - # This exception is raised by aiohttp/connector.py:901 in _create_direct_connection - # and it is not translated to any other error by tritonclient/http/aio/__init__.py:132 in _get method. - # res = await self._stub.get(url=req_url, - # and tritonclient/http/aio/__init__.py:242 in is_server_ready method. - # response = await self._get(request_uri=request_uri, - server_live = False - server_ready = False - except RuntimeError: - # This exception is raised by aiohttp/client.py:400 in _request - # and it is not translated to any other error by tritonclient/grpc/aio/__init__.py:151: in is_server_ready method. 
- # response = await self._client_stub.ServerReady(request=request, - server_live = False - server_ready = False - except grpc._cython.cygrpc.UsageError: - # This exception is raised by grpcio/grpc/_cython/_cygrpc/aio/channel.pyx.pxi:124 - # and it is not translated to any other error by tritonclient/grpc/aio/__init__.py", line 151, in is_server_ready - # response = await self._client_stub.ServerReady(request=request, - server_live = False - server_ready = False - if server_ready and server_live: - break - _LOGGER.debug(f"Sleeping for {sleep_time_s:0.2f} seconds") - await asyncio.sleep(sleep_time_s) - except asyncio.TimeoutError as e: - # This error is caused by our timeout, not by Triton Inference Server client. - message = "Timeout while waiting for model" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - _LOGGER.debug("Server is ready") - - -async def asyncio_wait_for_model_status_loaded( - asyncio_client: aio_clients, - model_name: str, - sleep_time_s: float, - model_version: Optional[str] = None, -): - """Wait for model status loaded. - - Function runs the following async function to check model status: - ```python - asyncio_get_model_state(asyncio_client, model_name, model_version) - ``` - If it return _ModelState.READY, then another async function can check if model is really ready: - ```python - asyncio_client.is_model_ready(model_name) - ``` - This function uses the above functions to check if model is ready together - with asyncio.wait_for(...) to limit the time of waiting. - - Function contains while loop with sleep to check model status periodically. - - Args: - asyncio_client: Triton Inference Server client to use for communication - model_name: name of the model which configuration we're requesting. - model_version: - version of the model which configuration we're requesting. - If model_version is None configuration of the latest model is returned. - The latest versions of the model are the numerically greatest version numbers. - sleep_time_s: time interval, in seconds, between successive checks to determine if the model configuration has been completed. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - """ - model_version = model_version or "" - model_version_msg = model_version or LATEST_MODEL_VERSION - _LOGGER.debug(f"Waiting for model {model_name}, {model_version_msg} to be ready") - try: - while True: - _LOGGER.debug(f"Checking if model {model_name} is ready") - is_model_ready = await asyncio_client.is_model_ready(model_name, model_version) - if is_model_ready: - break - _LOGGER.debug(f"Sleeping for {sleep_time_s} seconds") - await asyncio.sleep(sleep_time_s) - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {model_name} state (timeout={sleep_time_s:0.2f})" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - _LOGGER.debug(f"Model {model_name}, {model_version_msg} is ready") - - -async def asyncio_wait_for_model_ready( - asyncio_client: aio_clients, - model_name: str, - model_version: Optional[str] = None, - timeout_s: float = _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S, -): - """Wait for Triton Inference Server and deployed on it model readiness. - - Args: - asyncio_client: Triton Inference Server client to use for communication - model_name: name of the model which configuration we're requesting. - model_version: - version of the model which configuration we're requesting. 
- If model_version is None configuration of the latest model is returned. - The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to finish model configuration obtain. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - - """ - _LOGGER.debug(f"Waiting for model {model_name} to be ready (timeout={timeout_s:0.2f})") - sleep_time_s = timeout_s * _DEFAULT_ASYNC_SLEEP_FACTOR_S - try: - should_finish_before = time.time() + timeout_s - await asyncio.wait_for(asyncio_wait_for_server_ready(asyncio_client, sleep_time_s), timeout_s) - _LOGGER.debug(f"Waiting for model {model_name} to be ready") - timeout_s = max(0, should_finish_before - time.time()) - await asyncio.wait_for( - asyncio_wait_for_model_status_loaded( - asyncio_client, model_name=model_name, model_version=model_version, sleep_time_s=sleep_time_s - ), - timeout_s, - ) - except PyTritonClientModelUnavailableError as e: - _LOGGER.error(f"Failed to obtain model {model_name} config error {e}") - raise e - except asyncio.TimeoutError as e: - _LOGGER.error(f"Failed to obtain model {model_name} config error {e}") - raise PyTritonClientTimeoutError( - f"Timeout while waiting for model {model_name} to be ready (timeout={timeout_s:0.2f})" - ) from e - _LOGGER.debug(f"Model {model_name} is ready") diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/client.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/client.py deleted file mode 100644 index 5e4c68fdf29efc8d2d271c15a86552155aeb5879..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/client.py +++ /dev/null @@ -1,2033 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Clients for easy interaction with models deployed on the Triton Inference Server. - -Typical usage example: - -```python -client = ModelClient("localhost", "MyModel") -result_dict = client.infer_sample(input_a=a, input_b=b) -client.close() -``` - -Inference inputs can be provided either as positional or keyword arguments: - -```python -result_dict = client.infer_sample(input1, input2) -result_dict = client.infer_sample(a=input1, b=input2) -``` - -Mixing of argument passing conventions is not supported and will raise PyTritonClientValueError. 
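A recurring pattern in the readiness helpers above (and again in the synchronous client below) is splitting a single overall timeout across successive waits: the deadline is computed once, and the remaining budget is recomputed before each step. A small sketch of that budgeting with two placeholder steps (the sleeps stand in for server and model readiness checks):

```python
import asyncio
import time


async def wait_with_budget(timeout_s: float) -> None:
    # One deadline up front; the remaining budget shrinks as steps complete.
    should_finish_before = time.time() + timeout_s

    await asyncio.wait_for(asyncio.sleep(0.2), timeout_s)      # step 1: e.g. server readiness

    remaining = max(0.0, should_finish_before - time.time())
    await asyncio.wait_for(asyncio.sleep(0.2), remaining)      # step 2: e.g. model readiness


asyncio.run(wait_with_budget(1.0))
```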
-""" - -import asyncio -import contextlib -import itertools -import logging -import socket -import time -import warnings -from concurrent.futures import Future -from queue import Empty, Full, Queue -from threading import Lock, Thread -from typing import Any, Dict, Optional, Tuple, Union - -import gevent -import numpy as np -import tritonclient.grpc -import tritonclient.grpc.aio -import tritonclient.http -import tritonclient.http.aio -import tritonclient.utils - -from pytriton.client.asyncio_utils import asyncio_get_model_config, asyncio_wait_for_model_ready -from pytriton.client.exceptions import ( - PyTritonClientClosedError, - PyTritonClientInferenceServerError, - PyTritonClientModelDoesntSupportBatchingError, - PyTritonClientQueueFullError, - PyTritonClientTimeoutError, - PyTritonClientValueError, -) -from pytriton.client.utils import ( - _DEFAULT_NETWORK_TIMEOUT_S, - _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S, - TritonUrl, - get_model_config, - wait_for_model_ready, - wait_for_server_ready, -) -from pytriton.client.warnings import NotSupportedTimeoutWarning -from pytriton.model_config.triton_model_config import TritonModelConfig - -_LOGGER = logging.getLogger(__name__) - -_DEFAULT_SYNC_INIT_TIMEOUT_S = _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S -_DEFAULT_FUTURES_INIT_TIMEOUT_S = _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S -DEFAULT_INFERENCE_TIMEOUT_S = 60.0 - - -_IOType = Union[Tuple[np.ndarray, ...], Dict[str, np.ndarray]] - - -def _verify_inputs_args(inputs, named_inputs): - if not inputs and not named_inputs: - raise PyTritonClientValueError("Provide input data") - if not bool(inputs) ^ bool(named_inputs): - raise PyTritonClientValueError("Use either positional either keyword method arguments convention") - - -def _verify_parameters(parameters_or_headers: Optional[Dict[str, Union[str, int, bool]]] = None): - if parameters_or_headers is None: - return - if not isinstance(parameters_or_headers, dict): - raise PyTritonClientValueError("Parameters and headers must be a dictionary") - for key, value in parameters_or_headers.items(): - if not isinstance(key, str): - raise PyTritonClientValueError("Parameter/header key must be a string") - if not isinstance(value, (str, int, bool)): - raise PyTritonClientValueError("Parameter/header value must be a string, integer or boolean") - - -class BaseModelClient: - """Base client for model deployed on the Triton Inference Server.""" - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - lazy_init: bool = True, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - model_config: Optional[TritonModelConfig] = None, - ensure_model_is_ready: bool = True, - ): - """Inits BaseModelClient for given model deployed on the Triton Inference Server. - - Common usage: - - ```python - client = ModelClient("localhost", "BERT") - result_dict = client.infer_sample(input1_sample, input2_sample) - client.close() - ``` - - Args: - url: The Triton Inference Server url, e.g. `grpc://localhost:8001`. - In case no scheme is provided http scheme will be used as default. - In case no port is provided default port for given scheme will be used - - 8001 for grpc scheme, 8000 for http scheme. - model_name: name of the model to interact with. - model_version: version of the model to interact with. - If model_version is None inference on latest model will be performed. - The latest versions of the model are numerically the greatest version numbers. 
- lazy_init: if initialization should be performed just before sending first request to inference server. - init_timeout_s: timeout in seconds for the server and model to be ready. If not passed, the default timeout of 300 seconds will be used. - inference_timeout_s: timeout in seconds for a single model inference request. If not passed, the default timeout of 60 seconds will be used. - model_config: model configuration. If not passed, it will be read from inference server during initialization. - ensure_model_is_ready: if model should be checked if it is ready before first inference request. - - Raises: - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientTimeoutError: - if `lazy_init` argument is False and wait time for server and model being ready exceeds `init_timeout_s`. - PyTritonClientInvalidUrlError: If provided Triton Inference Server url is invalid. - """ - self._init_timeout_s = _DEFAULT_SYNC_INIT_TIMEOUT_S if init_timeout_s is None else init_timeout_s - self._inference_timeout_s = DEFAULT_INFERENCE_TIMEOUT_S if inference_timeout_s is None else inference_timeout_s - self._network_timeout_s = min(_DEFAULT_NETWORK_TIMEOUT_S, self._init_timeout_s) - - self._general_client = self.create_client_from_url(url, network_timeout_s=self._network_timeout_s) - self._infer_client = self.create_client_from_url(url, network_timeout_s=self._inference_timeout_s) - - self._model_name = model_name - self._model_version = model_version - - self._request_id_generator = itertools.count(0) - - # Monkey patch __del__ method from client to catch error in client when instance is garbage collected. - # This is needed because we are closing client in __exit__ method or in close method. - # (InferenceClient uses gevent library which does not support closing twice from different threads) - self._monkey_patch_client() - - if model_config is not None: - self._model_config = model_config - self._model_ready = None if ensure_model_is_ready else True - - else: - self._model_config = None - self._model_ready = None - self._lazy_init: bool = lazy_init - - self._handle_lazy_init() - - @classmethod - def from_existing_client(cls, existing_client: "BaseModelClient"): - """Create a new instance from an existing client using the same class. - - Common usage: - ```python - client = BaseModelClient.from_existing_client(existing_client) - ``` - - Args: - existing_client: An instance of an already initialized subclass. - - Returns: - A new instance of the same subclass with shared configuration and readiness state. - """ - kwargs = {} - # Copy model configuration and readiness state if present - if hasattr(existing_client, "_model_config"): - kwargs["model_config"] = existing_client._model_config - kwargs["ensure_model_is_ready"] = False - - new_client = cls( - url=existing_client._url, - model_name=existing_client._model_name, - model_version=existing_client._model_version, - init_timeout_s=existing_client._init_timeout_s, - inference_timeout_s=existing_client._inference_timeout_s, - **kwargs, - ) - - return new_client - - def create_client_from_url(self, url: str, network_timeout_s: Optional[float] = None): - """Create Triton Inference Server client. - - Args: - url: url of the server to connect to. - If url doesn't contain scheme (e.g. "localhost:8001") http scheme is added. - If url doesn't contain port (e.g. "localhost") default port for given scheme is added. - network_timeout_s: timeout for client commands. Default value is 60.0 s. 
- - Returns: - Triton Inference Server client. - - Raises: - PyTritonClientInvalidUrlError: If provided Triton Inference Server url is invalid. - """ - self._triton_url = TritonUrl.from_url(url) - self._url = self._triton_url.without_scheme - self._triton_client_lib = self.get_lib() - self._monkey_patch_client() - - if self._triton_url.scheme == "grpc": - # by default grpc client has very large number of timeout, thus we want to make it equal to http client timeout - network_timeout_s = _DEFAULT_NETWORK_TIMEOUT_S if network_timeout_s is None else network_timeout_s - warnings.warn( - f"tritonclient.grpc doesn't support timeout for other commands than infer. Ignoring network_timeout: {network_timeout_s}.", - NotSupportedTimeoutWarning, - stacklevel=1, - ) - - triton_client_init_kwargs = self._get_init_extra_args() - - _LOGGER.debug( - f"Creating InferenceServerClient for {self._triton_url.with_scheme} with {triton_client_init_kwargs}" - ) - return self._triton_client_lib.InferenceServerClient(self._url, **triton_client_init_kwargs) - - def get_lib(self): - """Returns tritonclient library for given scheme.""" - raise NotImplementedError - - @property - def _next_request_id(self) -> str: - # pytype complained about creating generator in __init__ method - # so we create it lazily - if getattr(self, "_request_id_generator", None) is None: - self._request_id_generator = itertools.count(0) - return str(next(self._request_id_generator)) - - def _get_init_extra_args(self): - timeout = self._inference_timeout_s # pytype: disable=attribute-error - # The inference timeout is used for both the HTTP and the GRPC protocols. However, - # the way the timeout is passed to the client differs depending on the protocol. - # For the HTTP protocol, the timeout is set in the ``__init__`` method as ``network_timeout`` - # and ``connection_timeout``. For the GRPC protocol, the timeout - # is passed to the infer method as ``client_timeout``. - # Both protocols support timeouts correctly and will raise an exception - # if the network request or the inference process takes longer than the timeout. - # This is a design choice of the underlying tritonclient library. - - if self._triton_url.scheme != "http": - return {} - - kwargs = { - # This value sets the maximum time allowed for each network request in both model loading and inference process - "network_timeout": timeout, - # This value sets the maximum time allowed for establishing a connection to the server. - # We use the inference timeout here instead of the init timeout because the init timeout - # is meant for waiting for the model to be ready. The connection timeout should be shorter - # than the init timeout because it only checks if connection is established (e.g. 
correct port) - "connection_timeout": timeout, - } - return kwargs - - def _monkey_patch_client(self): - pass - - def _get_model_config_extra_args(self): - # For the GRPC protocol, the timeout must be passed to the each request as client_timeout - # model_config doesn't yet support timeout but it is planned for the future - # grpc_network_timeout_s will be used for model_config - return {} - - def _handle_lazy_init(self): - raise NotImplementedError - - -def _run_once_per_lib(f): - def wrapper(_self): - if _self._triton_client_lib not in wrapper.patched: - wrapper.patched.add(_self._triton_client_lib) - return f(_self) - - wrapper.patched = set() - return wrapper - - -class ModelClient(BaseModelClient): - """Synchronous client for model deployed on the Triton Inference Server.""" - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - lazy_init: bool = True, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - model_config: Optional[TritonModelConfig] = None, - ensure_model_is_ready: bool = True, - ): - """Inits ModelClient for given model deployed on the Triton Inference Server. - - If `lazy_init` argument is False, model configuration will be read - from inference server during initialization. - - Common usage: - - ```python - client = ModelClient("localhost", "BERT") - result_dict = client.infer_sample(input1_sample, input2_sample) - client.close() - ``` - - Client supports also context manager protocol: - - ```python - with ModelClient("localhost", "BERT") as client: - result_dict = client.infer_sample(input1_sample, input2_sample) - ``` - - The creation of client requires connection to the server and downloading model configuration. You can create client from existing client using the same class: - - ```python - client = ModelClient.from_existing_client(existing_client) - ``` - - Args: - url: The Triton Inference Server url, e.g. 'grpc://localhost:8001'. - In case no scheme is provided http scheme will be used as default. - In case no port is provided default port for given scheme will be used - - 8001 for grpc scheme, 8000 for http scheme. - model_name: name of the model to interact with. - model_version: version of the model to interact with. - If model_version is None inference on latest model will be performed. - The latest versions of the model are numerically the greatest version numbers. - lazy_init: if initialization should be performed just before sending first request to inference server. - init_timeout_s: timeout for maximum waiting time in loop, which sends retry requests ask if model is ready. It is applied at initialization time only when `lazy_init` argument is False. Default is to do retry loop at first inference. - inference_timeout_s: timeout in seconds for the model inference process. - If non passed default 60 seconds timeout will be used. - For HTTP client it is not only inference timeout but any client request timeout - - get model config, is model loaded. For GRPC client it is only inference timeout. - model_config: model configuration. If not passed, it will be read from inference server during initialization. - ensure_model_is_ready: if model should be checked if it is ready before first inference request. - - Raises: - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientTimeoutError: - if `lazy_init` argument is False and wait time for server and model being ready exceeds `init_timeout_s`. 
- PyTritonClientUrlParseError: In case of problems with parsing url. - """ - super().__init__( - url=url, - model_name=model_name, - model_version=model_version, - lazy_init=lazy_init, - init_timeout_s=init_timeout_s, - inference_timeout_s=inference_timeout_s, - model_config=model_config, - ensure_model_is_ready=ensure_model_is_ready, - ) - - def get_lib(self): - """Returns tritonclient library for given scheme.""" - return {"grpc": tritonclient.grpc, "http": tritonclient.http}[self._triton_url.scheme.lower()] - - def __enter__(self): - """Create context for using ModelClient as a context manager.""" - return self - - def __exit__(self, *_): - """Close resources used by ModelClient instance when exiting from the context.""" - self.close() - - def load_model(self, config: Optional[str] = None, files: Optional[dict] = None): - """Load model on the Triton Inference Server. - - Args: - config: str - Optional JSON representation of a model config provided for - the load request, if provided, this config will be used for - loading the model. - files: dict - Optional dictionary specifying file path (with "file:" prefix) in - the override model directory to the file content as bytes. - The files will form the model directory that the model will be - loaded from. If specified, 'config' must be provided to be - the model configuration of the override model directory. - """ - self._general_client.load_model(self._model_name, config=config, files=files) - - def unload_model(self): - """Unload model from the Triton Inference Server.""" - self._general_client.unload_model(self._model_name) - - def close(self): - """Close resources used by ModelClient. - - This method closes the resources used by the ModelClient instance, - including the Triton Inference Server connections. - Once this method is called, the ModelClient instance should not be used again. - """ - _LOGGER.debug("Closing ModelClient") - try: - if self._general_client is not None: - self._general_client.close() - if self._infer_client is not None: - self._infer_client.close() - self._general_client = None - self._infer_client = None - except Exception as e: - _LOGGER.error(f"Error while closing ModelClient resources: {e}") - raise e - - def wait_for_model(self, timeout_s: float): - """Wait for the Triton Inference Server and the deployed model to be ready. - - Args: - timeout_s: timeout in seconds to wait for the server and model to be ready. - - Raises: - PyTritonClientTimeoutError: If the server and model are not ready before the given timeout. - PyTritonClientModelUnavailableError: If the model with the given name (and version) is unavailable. - KeyboardInterrupt: If the hosting process receives SIGINT. - PyTritonClientClosedError: If the ModelClient is closed. - """ - if self._general_client is None: - raise PyTritonClientClosedError("ModelClient is closed") - wait_for_model_ready(self._general_client, self._model_name, self._model_version, timeout_s=timeout_s) - - @property - def is_batching_supported(self): - """Checks if model supports batching. - - Also waits for server to get into readiness state. - """ - return self.model_config.max_batch_size > 0 - - def wait_for_server(self, timeout_s: float): - """Wait for Triton Inference Server readiness. - - Args: - timeout_s: timeout to server get into readiness state. - - Raises: - PyTritonClientTimeoutError: If server is not in readiness state before given timeout. 
- KeyboardInterrupt: If hosting process receives SIGINT - """ - wait_for_server_ready(self._general_client, timeout_s=timeout_s) - - @property - def model_config(self) -> TritonModelConfig: - """Obtain the configuration of the model deployed on the Triton Inference Server. - - This method waits for the server to get into readiness state before obtaining the model configuration. - - Returns: - TritonModelConfig: configuration of the model deployed on the Triton Inference Server. - - Raises: - PyTritonClientTimeoutError: If the server and model are not in readiness state before the given timeout. - PyTritonClientModelUnavailableError: If the model with the given name (and version) is unavailable. - KeyboardInterrupt: If the hosting process receives SIGINT. - PyTritonClientClosedError: If the ModelClient is closed. - """ - if not self._model_config: - if self._general_client is None: - raise PyTritonClientClosedError("ModelClient is closed") - - self._model_config = get_model_config( - self._general_client, self._model_name, self._model_version, timeout_s=self._init_timeout_s - ) - return self._model_config - - def infer_sample( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Dict[str, np.ndarray]: - """Run synchronous inference on a single data sample. - - Typical usage: - - ```python - client = ModelClient("localhost", "MyModel") - result_dict = client.infer_sample(input1, input2) - client.close() - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - result_dict = client.infer_sample(input1, input2) - result_dict = client.infer_sample(a=input1, b=input2) - ``` - - Args: - *inputs: Inference inputs provided as positional arguments. - parameters: Custom inference parameters. - headers: Custom inference headers. - **named_inputs: Inference inputs provided as named arguments. - - Returns: - Dictionary with inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: If mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: If the wait time for the server and model being ready exceeds `init_timeout_s` or - inference request time exceeds `inference_timeout_s`. - PyTritonClientModelUnavailableError: If the model with the given name (and version) is unavailable. - PyTritonClientInferenceServerError: If an error occurred on the inference callable or Triton Inference Server side. - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - if self.is_batching_supported: - if inputs: - inputs = tuple(data[np.newaxis, ...] for data in inputs) - elif named_inputs: - named_inputs = {name: data[np.newaxis, ...] for name, data in named_inputs.items()} - - result = self._infer(inputs or named_inputs, parameters, headers) - - return self._debatch_result(result) - - def infer_batch( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Dict[str, np.ndarray]: - """Run synchronous inference on batched data. 
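`infer_sample` above prepends a batch axis to each input before sending, while `infer_batch` expects the caller to batch; on the way out the single-sample result is de-batched again (see `_debatch_result` further down). A numpy-only sketch of that add/strip step, independent of any Triton call:

```python
import numpy as np

sample = {"input_a": np.array([1.0, 2.0, 3.0])}           # a single, unbatched sample

# What infer_sample does on the way in: add a leading batch dimension.
batched = {name: data[np.newaxis, ...] for name, data in sample.items()}
print(batched["input_a"].shape)                            # (1, 3)

# What _debatch_result does on the way out: drop the batch dimension again.
fake_result = {"output": np.array([[0.1, 0.9]])}
debatched = {name: data[0] for name, data in fake_result.items()}
print(debatched["output"].shape)                           # (2,)
```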
- - Typical usage: - - ```python - client = ModelClient("localhost", "MyModel") - result_dict = client.infer_batch(input1, input2) - client.close() - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - result_dict = client.infer_batch(input1, input2) - result_dict = client.infer_batch(a=input1, b=input2) - ``` - - Args: - *inputs: Inference inputs provided as positional arguments. - parameters: Custom inference parameters. - headers: Custom inference headers. - **named_inputs: Inference inputs provided as named arguments. - - Returns: - Dictionary with inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: If mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: If the wait time for the server and model being ready exceeds `init_timeout_s` or - inference request time exceeds `inference_timeout_s`. - PyTritonClientModelUnavailableError: If the model with the given name (and version) is unavailable. - PyTritonClientInferenceServerError: If an error occurred on the inference callable or Triton Inference Server side. - PyTritonClientModelDoesntSupportBatchingError: If the model doesn't support batching. - PyTritonClientValueError: if mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: - in case of first method call, `lazy_init` argument is False - and wait time for server and model being ready exceeds `init_timeout_s` or - inference time exceeds `inference_timeout_s` passed to `__init__`. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientInferenceServerError: If error occurred on inference callable or Triton Inference Server side, - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - if not self.is_batching_supported: - raise PyTritonClientModelDoesntSupportBatchingError( - f"Model {self.model_config.model_name} doesn't support batching - use infer_sample method instead" - ) - - return self._infer(inputs or named_inputs, parameters, headers) - - def _wait_and_init_model_config(self, init_timeout_s: float): - if self._general_client is None: - raise PyTritonClientClosedError("ModelClient is closed") - - should_finish_before_s = time.time() + init_timeout_s - self.wait_for_model(init_timeout_s) - self._model_ready = True - timeout_s = max(0.0, should_finish_before_s - time.time()) - self._model_config = get_model_config( - self._general_client, self._model_name, self._model_version, timeout_s=timeout_s - ) - - def _create_request(self, inputs: _IOType): - if self._infer_client is None: - raise PyTritonClientClosedError("ModelClient is closed") - - if not self._model_ready: - self._wait_and_init_model_config(self._init_timeout_s) - - if isinstance(inputs, Tuple): - inputs = {input_spec.name: input_data for input_spec, input_data in zip(self.model_config.inputs, inputs)} - - inputs_wrapped = [] - - # to help pytype to obtain variable type - inputs: Dict[str, np.ndarray] - - for input_name, input_data in inputs.items(): - if input_data.dtype == object and not isinstance(input_data.reshape(-1)[0], bytes): - raise RuntimeError( - f"Numpy array for {input_name!r} input with dtype=object should contain encoded strings \ - \\(e.g. into utf-8\\). Element type: {type(input_data.reshape(-1)[0])}" - ) - if input_data.dtype.type == np.str_: - raise RuntimeError( - "Unicode inputs are not supported. 
" - f"Encode numpy array for {input_name!r} input (ex. with np.char.encode(array, 'utf-8'))." - ) - triton_dtype = tritonclient.utils.np_to_triton_dtype(input_data.dtype) - infer_input = self._triton_client_lib.InferInput(input_name, input_data.shape, triton_dtype) - infer_input.set_data_from_numpy(input_data) - inputs_wrapped.append(infer_input) - - outputs_wrapped = [ - self._triton_client_lib.InferRequestedOutput(output_spec.name) for output_spec in self.model_config.outputs - ] - return inputs_wrapped, outputs_wrapped - - def _infer(self, inputs: _IOType, parameters, headers) -> Dict[str, np.ndarray]: - if self.model_config.decoupled: - raise PyTritonClientInferenceServerError("Model config is decoupled. Use DecoupledModelClient instead.") - - inputs_wrapped, outputs_wrapped = self._create_request(inputs) - - try: - _LOGGER.debug("Sending inference request to Triton Inference Server") - response = self._infer_client.infer( - model_name=self._model_name, - model_version=self._model_version or "", - inputs=inputs_wrapped, - headers=headers, - outputs=outputs_wrapped, - request_id=self._next_request_id, - parameters=parameters, - **self._get_infer_extra_args(), - ) - except tritonclient.utils.InferenceServerException as e: - # tritonclient.grpc raises execption with message containing "Deadline Exceeded" for timeout - if "Deadline Exceeded" in e.message(): - raise PyTritonClientTimeoutError( - f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s. Message: {e.message()}" - ) from e - - raise PyTritonClientInferenceServerError( - f"Error occurred during inference request. Message: {e.message()}" - ) from e - except socket.timeout as e: # tritonclient.http raises socket.timeout for timeout - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s Message: {e}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - except OSError as e: # tritonclient.http raises socket.error for connection error - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s Message: {e}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - - if isinstance(response, tritonclient.http.InferResult): - outputs = { - output["name"]: response.as_numpy(output["name"]) for output in response.get_response()["outputs"] - } - else: - outputs = {output.name: response.as_numpy(output.name) for output in response.get_response().outputs} - - return outputs - - def _get_numpy_result(self, result): - if isinstance(result, tritonclient.grpc.InferResult): - result = {output.name: result.as_numpy(output.name) for output in result.get_response().outputs} - else: - result = {output["name"]: result.as_numpy(output["name"]) for output in result.get_response()["outputs"]} - return result - - def _debatch_result(self, result): - if self.is_batching_supported: - result = {name: data[0] for name, data in result.items()} - return result - - def _handle_lazy_init(self): - if not self._lazy_init: - self._wait_and_init_model_config(self._init_timeout_s) - - def _get_infer_extra_args(self): - if self._triton_url.scheme == "http": - return {} - # For the GRPC protocol, the timeout is passed to the infer method as client_timeout - # This timeout applies to the whole inference process and each network request - - # The ``infer`` supports also timeout argument for both GRPC and HTTP. - # It is applied at server side and supported only for dynamic batching. 
- # However, it is not used here yet and planned for future release - kwargs = {"client_timeout": self._inference_timeout_s} - return kwargs - - @_run_once_per_lib - def _monkey_patch_client(self): - """Monkey patch InferenceServerClient to catch error in __del__.""" - _LOGGER.info(f"Patch ModelClient {self._triton_url.scheme}") - if not hasattr(self._triton_client_lib.InferenceServerClient, "__del__"): - return - - old_del = self._triton_client_lib.InferenceServerClient.__del__ - - def _monkey_patched_del(self): - """Monkey patched del.""" - try: - old_del(self) - except gevent.exceptions.InvalidThreadUseError: - _LOGGER.info("gevent.exceptions.InvalidThreadUseError in __del__ of InferenceServerClient") - except Exception as e: - _LOGGER.error("Exception in __del__ of InferenceServerClient: %s", e) - - self._triton_client_lib.InferenceServerClient.__del__ = _monkey_patched_del - - -class DecoupledModelClient(ModelClient): - """Synchronous client for decoupled model deployed on the Triton Inference Server.""" - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - lazy_init: bool = True, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - model_config: Optional[TritonModelConfig] = None, - ensure_model_is_ready: bool = True, - ): - """Inits DecoupledModelClient for given decoupled model deployed on the Triton Inference Server. - - Common usage: - - ```python - client = DecoupledModelClient("localhost", "BERT") - for response in client.infer_sample(input1_sample, input2_sample): - print(response) - client.close() - ``` - - Args: - url: The Triton Inference Server url, e.g. `grpc://localhost:8001`. - In case no scheme is provided http scheme will be used as default. - In case no port is provided default port for given scheme will be used - - 8001 for grpc scheme, 8000 for http scheme. - model_name: name of the model to interact with. - model_version: version of the model to interact with. - If model_version is None inference on latest model will be performed. - The latest versions of the model are numerically the greatest version numbers. - lazy_init: if initialization should be performed just before sending first request to inference server. - init_timeout_s: timeout in seconds for the server and model to be ready. If not passed, the default timeout of 300 seconds will be used. - inference_timeout_s: timeout in seconds for a single model inference request. If not passed, the default timeout of 60 seconds will be used. - model_config: model configuration. If not passed, it will be read from inference server during initialization. - ensure_model_is_ready: if model should be checked if it is ready before first inference request. - - Raises: - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientTimeoutError: - if `lazy_init` argument is False and wait time for server and model being ready exceeds `init_timeout_s`. - PyTritonClientInvalidUrlError: If provided Triton Inference Server url is invalid. 
- """ - super().__init__( - url, - model_name, - model_version, - lazy_init=lazy_init, - init_timeout_s=init_timeout_s, - inference_timeout_s=inference_timeout_s, - model_config=model_config, - ensure_model_is_ready=ensure_model_is_ready, - ) - if self._triton_url.scheme == "http": - raise PyTritonClientValueError("DecoupledModelClient is only supported for grpc protocol") - self._queue = Queue() - self._lock = Lock() - - def close(self): - """Close resources used by DecoupledModelClient.""" - _LOGGER.debug("Closing DecoupledModelClient") - if self._lock.acquire(blocking=False): - try: - super().close() - finally: - self._lock.release() - else: - _LOGGER.warning("DecoupledModelClient is stil streaming answers") - self._infer_client.stop_stream(False) - super().close() - - def _infer(self, inputs: _IOType, parameters, headers): - if not self._lock.acquire(blocking=False): - raise PyTritonClientInferenceServerError("Inference is already in progress") - if not self.model_config.decoupled: - raise PyTritonClientInferenceServerError("Model config is coupled. Use ModelClient instead.") - - inputs_wrapped, outputs_wrapped = self._create_request(inputs) - if parameters is not None: - raise PyTritonClientValueError("DecoupledModelClient does not support parameters") - if headers is not None: - raise PyTritonClientValueError("DecoupledModelClient does not support headers") - try: - _LOGGER.debug("Sending inference request to Triton Inference Server") - if self._infer_client._stream is None: - self._infer_client.start_stream(callback=lambda result, error: self._response_callback(result, error)) - - self._infer_client.async_stream_infer( - model_name=self._model_name, - model_version=self._model_version or "", - inputs=inputs_wrapped, - outputs=outputs_wrapped, - request_id=self._next_request_id, - enable_empty_final_response=True, - **self._get_infer_extra_args(), - ) - except tritonclient.utils.InferenceServerException as e: - # tritonclient.grpc raises execption with message containing "Deadline Exceeded" for timeout - if "Deadline Exceeded" in e.message(): - raise PyTritonClientTimeoutError( - f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s. Message: {e.message()}" - ) from e - - raise PyTritonClientInferenceServerError( - f"Error occurred during inference request. Message: {e.message()}" - ) from e - except socket.timeout as e: # tritonclient.http raises socket.timeout for timeout - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s Message: {e}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - except OSError as e: # tritonclient.http raises socket.error for connection error - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s Message: {e}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - _LOGGER.debug("Returning response iterator") - return self._create_response_iterator() - - def _response_callback(self, response, error): - _LOGGER.debug(f"Received response from Triton Inference Server: {response}") - if error: - _LOGGER.error(f"Error occurred during inference request. 
Message: {error}") - self._queue.put(error) - else: - actual_response = response.get_response() - # Check if the object is not None - triton_final_response = actual_response.parameters.get("triton_final_response") - if triton_final_response and triton_final_response.bool_param: - self._queue.put(None) - else: - result = self._get_numpy_result(response) - self._queue.put(result) - - def _create_response_iterator(self): - try: - while True: - try: - item = self._queue.get(timeout=self._inference_timeout_s) - except Empty as e: - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - if isinstance(item, Exception): - message = f"Error occurred during inference request. Message: {item.message()}" - _LOGGER.error(message) - raise PyTritonClientInferenceServerError(message) from item - - if item is None: - break - yield item - finally: - self._lock.release() - - def _debatch_result(self, result): - if self.is_batching_supported: - result = ({name: data[0] for name, data in result_.items()} for result_ in result) - return result - - def _get_infer_extra_args(self): - # kwargs = super()._get_infer_extra_args() - kwargs = {} - # kwargs["enable_empty_final_response"] = True - return kwargs - - -class AsyncioModelClient(BaseModelClient): - """Asyncio client for model deployed on the Triton Inference Server. - - This client is based on Triton Inference Server Python clients and GRPC library: - - ``tritonclient.http.aio.InferenceServerClient`` - - ``tritonclient.grpc.aio.InferenceServerClient`` - - It can wait for server to be ready with model loaded and then perform inference on it. - ``AsyncioModelClient`` supports asyncio context manager protocol. - - Typical usage: - - ```python - from pytriton.client import AsyncioModelClient - import numpy as np - - input1_sample = np.random.rand(1, 3, 224, 224).astype(np.float32) - input2_sample = np.random.rand(1, 3, 224, 224).astype(np.float32) - - client = AsyncioModelClient("localhost", "MyModel") - result_dict = await client.infer_sample(input1_sample, input2_sample) - print(result_dict["output_name"]) - await client.close() - ``` - """ - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - lazy_init: bool = True, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - model_config: Optional[TritonModelConfig] = None, - ensure_model_is_ready: bool = True, - ): - """Inits AsyncioModelClient for given model deployed on the Triton Inference Server. - - If `lazy_init` argument is False, model configuration will be read - from inference server during initialization. - - Args: - url: The Triton Inference Server url, e.g. 'grpc://localhost:8001'. - In case no scheme is provided http scheme will be used as default. - In case no port is provided default port for given scheme will be used - - 8001 for grpc scheme, 8000 for http scheme. - model_name: name of the model to interact with. - model_version: version of the model to interact with. - If model_version is None inference on latest model will be performed. - The latest versions of the model are numerically the greatest version numbers. - lazy_init: if initialization should be performed just before sending first request to inference server. - init_timeout_s: timeout for server and model being ready. - inference_timeout_s: timeout in seconds for a single model inference request. 
If not passed, the default timeout of 60 seconds will be used. - model_config: model configuration. If not passed, it will be read from inference server during initialization. - ensure_model_is_ready: if model should be checked if it is ready before first inference request. - - Raises: - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientTimeoutError: if `lazy_init` argument is False and wait time for server and model being ready exceeds `init_timeout_s`. - PyTritonClientUrlParseError: In case of problems with parsing url. - """ - super().__init__( - url=url, - model_name=model_name, - model_version=model_version, - lazy_init=lazy_init, - init_timeout_s=init_timeout_s, - inference_timeout_s=inference_timeout_s, - model_config=model_config, - ensure_model_is_ready=ensure_model_is_ready, - ) - - def get_lib(self): - """Get Triton Inference Server Python client library.""" - return {"grpc": tritonclient.grpc.aio, "http": tritonclient.http.aio}[self._triton_url.scheme.lower()] - - async def __aenter__(self): - """Create context for use AsyncioModelClient as a context manager.""" - _LOGGER.debug("Entering AsyncioModelClient context") - try: - if not self._lazy_init: - _LOGGER.debug("Waiting in AsyncioModelClient context for model to be ready") - await self._wait_and_init_model_config(self._init_timeout_s) - _LOGGER.debug("Model is ready in AsyncioModelClient context") - return self - except Exception as e: - _LOGGER.error("Error occurred during AsyncioModelClient context initialization") - await self.close() - raise e - - async def __aexit__(self, *_): - """Close resources used by AsyncioModelClient when exiting from context.""" - await self.close() - _LOGGER.debug("Exiting AsyncioModelClient context") - - async def close(self): - """Close resources used by _ModelClientBase.""" - _LOGGER.debug("Closing InferenceServerClient") - await self._general_client.close() - await self._infer_client.close() - _LOGGER.debug("InferenceServerClient closed") - - async def wait_for_model(self, timeout_s: float): - """Asynchronous wait for Triton Inference Server and deployed on it model readiness. - - Args: - timeout_s: timeout to server and model get into readiness state. - - Raises: - PyTritonClientTimeoutError: If server and model are not in readiness state before given timeout. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - KeyboardInterrupt: If hosting process receives SIGINT - """ - _LOGGER.debug(f"Waiting for model {self._model_name} to be ready") - try: - await asyncio.wait_for( - asyncio_wait_for_model_ready( - self._general_client, self._model_name, self._model_version, timeout_s=timeout_s - ), - self._init_timeout_s, - ) - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {self._model_name} to be ready for {self._init_timeout_s}s" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - - @property - async def model_config(self): - """Obtain configuration of model deployed on the Triton Inference Server. - - Also waits for server to get into readiness state. 
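For example, because `model_config` is exposed as an asynchronous property, it has to be awaited; a minimal sketch (the model name and the `max_batch_size` access below are illustrative placeholders, not part of the original code):

```python
import asyncio

from pytriton.client import AsyncioModelClient


async def main():
    # "MyModel" is a placeholder; the awaited property fetches the model
    # configuration from the server, waiting for readiness first.
    async with AsyncioModelClient("localhost", "MyModel") as client:
        config = await client.model_config
        print(config.max_batch_size)


asyncio.run(main())
```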
- """ - try: - if not self._model_config: - kwargs = self._get_model_config_extra_args() - _LOGGER.debug(f"Obtaining model config for {self._model_name}") - - self._model_config = await asyncio.wait_for( - asyncio_get_model_config( - self._general_client, - self._model_name, - self._model_version, - timeout_s=self._init_timeout_s, - **kwargs, - ), - self._init_timeout_s, - ) - _LOGGER.debug(f"Obtained model config for {self._model_name}") - return self._model_config - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {self._model_name} to be ready for {self._init_timeout_s}s" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - - async def infer_sample( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ): - """Run asynchronous inference on single data sample. - - Typical usage: - - ```python - client = AsyncioModelClient("localhost", "MyModel") - result_dict = await client.infer_sample(input1, input2) - await client.close() - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - result_dict = await client.infer_sample(input1, input2) - result_dict = await client.infer_sample(a=input1, b=input2) - ``` - - Mixing of argument passing conventions is not supported and will raise PyTritonClientRuntimeError. - - Args: - *inputs: inference inputs provided as positional arguments. - parameters: custom inference parameters. - headers: custom inference headers. - **named_inputs: inference inputs provided as named arguments. - - Returns: - dictionary with inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: if mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: - in case of first method call, `lazy_init` argument is False - and wait time for server and model being ready exceeds `init_timeout_s` - or inference time exceeds `timeout_s`. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientInferenceServerError: If error occurred on inference callable or Triton Inference Server side. - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - _LOGGER.debug(f"Running inference for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - - model_supports_batching = model_config.max_batch_size > 0 - if model_supports_batching: - if inputs: - inputs = tuple(data[np.newaxis, ...] for data in inputs) - elif named_inputs: - named_inputs = {name: data[np.newaxis, ...] for name, data in named_inputs.items()} - - _LOGGER.debug(f"Running _infer for {self._model_name}") - result = await self._infer(inputs or named_inputs, parameters, headers) - _LOGGER.debug(f"_infer for {self._model_name} finished") - if model_supports_batching: - result = {name: data[0] for name, data in result.items()} - - return result - - async def infer_batch( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ): - """Run asynchronous inference on batched data. 
- - Typical usage: - - ```python - client = AsyncioModelClient("localhost", "MyModel") - result_dict = await client.infer_batch(input1, input2) - await client.close() - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - result_dict = await client.infer_batch(input1, input2) - result_dict = await client.infer_batch(a=input1, b=input2) - ``` - - Mixing of argument passing conventions is not supported and will raise PyTritonClientValueError. - - Args: - *inputs: inference inputs provided as positional arguments. - parameters: custom inference parameters. - headers: custom inference headers. - **named_inputs: inference inputs provided as named arguments. - - Returns: - dictionary with inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: if mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: - in case of first method call, `lazy_init` argument is False - and wait time for server and model being ready exceeds `init_timeout_s` - or inference time exceeds `timeout_s`. - PyTritonClientModelDoesntSupportBatchingError: if model doesn't support batching. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientInferenceServerError: If error occurred on inference callable or Triton Inference Server side. - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - _LOGGER.debug(f"Running inference for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - - model_supports_batching = model_config.max_batch_size > 0 - if not model_supports_batching: - _LOGGER.error(f"Model {model_config.model_name} doesn't support batching") - raise PyTritonClientModelDoesntSupportBatchingError( - f"Model {model_config.model_name} doesn't support batching - use infer_sample method instead" - ) - - _LOGGER.debug(f"Running _infer for {self._model_name}") - result = await self._infer(inputs or named_inputs, parameters, headers) - _LOGGER.debug(f"_infer for {self._model_name} finished") - return result - - async def _wait_and_init_model_config(self, init_timeout_s: float): - """Asynchronous wait for model and obtain model configuration. - - Args: - init_timeout_s: timeout for server and model being ready. - - Raises: - PyTritonClientTimeoutError: if wait time for server and model being ready exceeds `init_timeout_s` - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. 
- """ - try: - should_finish_before_s = time.time() + init_timeout_s - _LOGGER.debug(f"Waiting for model {self._model_name} to be ready") - - await asyncio.wait_for(self.wait_for_model(init_timeout_s), init_timeout_s) - _LOGGER.debug(f"Model {self._model_name} is ready") - self._model_ready = True - - timeout_s = max(0.0, should_finish_before_s - time.time()) - _LOGGER.debug(f"Obtaining model config for {self._model_name}") - self._model_config = await asyncio.wait_for( - asyncio_get_model_config( - self._general_client, self._model_name, self._model_version, timeout_s=timeout_s - ), - timeout_s, - ) - _LOGGER.debug(f"Model config for {self._model_name} obtained") - except asyncio.TimeoutError as e: - _LOGGER.error(f"Timeout exceeded while waiting for model {self._model_name} to be ready") - raise PyTritonClientTimeoutError( - f"Timeout exceeded while waiting for model {self._model_name} to be ready" - ) from e - - def _validate_input(self, input_name, input_data): - if input_data.dtype == object and not isinstance(input_data.reshape(-1)[0], bytes): - raise RuntimeError( - f"Numpy array for {input_name!r} input with dtype=object should contain encoded strings \ - \\(e.g. into utf-8\\). Element type: {type(input_data.reshape(-1)[0])}" - ) - if input_data.dtype.type == np.str_: - raise RuntimeError( - "Unicode inputs are not supported. " - f"Encode numpy array for {input_name!r} input (ex. with np.char.encode(array, 'utf-8'))." - ) - - async def _execute_infer(self, model_config, inputs_wrapped, outputs_wrapped, parameters, headers) -> Any: - try: - _LOGGER.debug(f"Sending InferRequest for {self._model_name}") - kwargs = self._get_infer_extra_args() - response = await self._infer_client.infer( - model_name=self._model_name, - model_version=self._model_version or "", - inputs=inputs_wrapped, - headers=headers, - outputs=outputs_wrapped, - request_id=self._next_request_id, - parameters=parameters, - **kwargs, - ) - except asyncio.exceptions.TimeoutError as e: - # HTTP aio client raises asyncio.exceptions.TimeoutError for timeout errors - message = f"Timeout exceeded while running inference for {self._model_name}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - except tritonclient.utils.InferenceServerException as e: - message = f"Error occurred on Triton Inference Server side:\n {e.message()}" - _LOGGER.error(message) - if "Deadline Exceeded" in e.message(): - # GRPC aio client raises InferenceServerException with message "Deadline Exceeded" - # for timeout errors - raise PyTritonClientTimeoutError(message) from e - else: - raise PyTritonClientInferenceServerError(message) from e - _LOGGER.debug(f"Received InferResponse for {self._model_name}") - outputs = {output_spec.name: response.as_numpy(output_spec.name) for output_spec in model_config.outputs} - return outputs - - async def _infer(self, inputs: _IOType, parameters, headers): - if self._model_ready: - _LOGGER.debug(f"Waiting for model {self._model_name} config") - await self._wait_and_init_model_config(self._init_timeout_s) - _LOGGER.debug(f"Model wait finished for {self._model_name}") - - _LOGGER.debug(f"Obtaining config for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - if model_config.decoupled: - raise PyTritonClientInferenceServerError( - "Model config is decoupled. Use DecouploedAsyncioModelClient instead." 
- ) - - if isinstance(inputs, Tuple): - inputs = {input_spec.name: input_data for input_spec, input_data in zip(model_config.inputs, inputs)} - - inputs_wrapped = [] - for input_name, input_data in inputs.items(): - if isinstance(input_data, np.ndarray): - self._validate_input(input_name, input_data) - triton_dtype = tritonclient.utils.np_to_triton_dtype(input_data.dtype) - infer_input = self._triton_client_lib.InferInput(input_name, input_data.shape, triton_dtype) - infer_input.set_data_from_numpy(input_data) - input_wrapped = infer_input - inputs_wrapped.append(input_wrapped) - else: - raise PyTritonClientValueError( - f"Input {input_name} is not a numpy array. Got {type(input_data)} instead." - ) - - outputs_wrapped = [ - self._triton_client_lib.InferRequestedOutput(output_spec.name) for output_spec in model_config.outputs - ] - return await self._execute_infer(model_config, inputs_wrapped, outputs_wrapped, parameters, headers) - - def _handle_lazy_init(self): - # Asynchronous lazy initialization is done in __aenter__ method - pass - - def _get_init_extra_args(self): - # The inference timeout is used for both the HTTP and the GRPC protocols. However, - # the way the timeout is passed to the client differs depending on the protocol. - # For the HTTP protocol, the timeout is set in the ``__init__`` method as ``conn_timeout`` for both connection and request timeouts. - # For the GRPC protocol, the timeout - # is passed to the infer method as ``client_timeout``. - # Both protocols support timeouts correctly and will raise an exception - # if the network request or the inference process takes longer than the timeout. - # This is a design choice of the underlying tritonclient library. - - if self._triton_url.scheme != "http": - return {} - - kwargs = { - # This value sets the maximum time allowed for both connection and network requests in both model loading and inference process - "conn_timeout": self._inference_timeout_s, - } - return kwargs - - def _get_infer_extra_args(self): - if self._triton_url.scheme == "http": - return {} - # For the GRPC protocol, the timeout is passed to the infer method as client_timeout - # This timeout applies to the whole inference process and each network request - - # The ``infer`` supports also timeout argument for both GRPC and HTTP. - # It is applied at server side and supported only for dynamic batching. - # However, it is not used here yet and planned for future release - kwargs = {"client_timeout": self._inference_timeout_s} - return kwargs - - -class AsyncioDecoupledModelClient(AsyncioModelClient): - """Asyncio client for model deployed on the Triton Inference Server. - - This client is based on Triton Inference Server Python clients and GRPC library: - * ``tritonclient.grpc.aio.InferenceServerClient`` - - It can wait for server to be ready with model loaded and then perform inference on it. - ``AsyncioDecoupledModelClient`` supports asyncio context manager protocol. - - The client is intended to be used with decoupled models and will raise an error if model is coupled. 
- - Typical usage: - ```python - from pytriton.client import AsyncioDecoupledModelClient - import numpy as np - - input1_sample = np.random.rand(1, 3, 224, 224).astype(np.float32) - input2_sample = np.random.rand(1, 3, 224, 224).astype(np.float32) - - async with AsyncioDecoupledModelClient("grpc://localhost", "MyModel") as client: - async for result_dict in client.infer_sample(input1_sample, input2_sample): - print(result_dict["output_name"]) - ``` - """ - - async def infer_sample( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ): - """Run asynchronous inference on single data sample. - - Typical usage: - - ```python - async with AsyncioDecoupledModelClient("grpc://localhost", "MyModel") as client: - async for result_dict in client.infer_sample(input1_sample, input2_sample): - print(result_dict["output_name"]) - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - results_iterator = client.infer_sample(input1, input2) - results_iterator = client.infer_sample(a=input1, b=input2) - ``` - - Mixing of argument passing conventions is not supported and will raise PyTritonClientRuntimeError. - - Args: - *inputs: inference inputs provided as positional arguments. - parameters: custom inference parameters. - headers: custom inference headers. - **named_inputs: inference inputs provided as named arguments. - - Returns: - Asynchronous generator, which generates dictionaries with partial inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: if mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: - in case of first method call, `lazy_init` argument is False - and wait time for server and model being ready exceeds `init_timeout_s` - or inference time exceeds `timeout_s`. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientInferenceServerError: If error occurred on inference callable or Triton Inference Server side. - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - _LOGGER.debug(f"Running inference for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - - model_supports_batching = model_config.max_batch_size > 0 - if model_supports_batching: - if inputs: - inputs = tuple(data[np.newaxis, ...] for data in inputs) - elif named_inputs: - named_inputs = {name: data[np.newaxis, ...] for name, data in named_inputs.items()} - - _LOGGER.debug(f"Running _infer for {self._model_name}") - result = self._infer(inputs or named_inputs, parameters, headers) - _LOGGER.debug(f"_infer for {self._model_name} finished") - - async for item in result: - if model_supports_batching: - debatched_item = {name: data[0] for name, data in item.items()} - yield debatched_item - else: - yield item - - async def infer_batch( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ): - """Run asynchronous inference on batched data. 
- - Typical usage: - - ```python - async with AsyncioDecoupledModelClient("grpc://localhost", "MyModel") as client: - async for result_dict in client.infer_batch(input1_sample, input2_sample): - print(result_dict["output_name"]) - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - results_iterator = client.infer_batch(input1, input2) - results_iterator = client.infer_batch(a=input1, b=input2) - ``` - - Mixing of argument passing conventions is not supported and will raise PyTritonClientRuntimeError. - - Args: - *inputs: inference inputs provided as positional arguments. - parameters: custom inference parameters. - headers: custom inference headers. - **named_inputs: inference inputs provided as named arguments. - - Returns: - Asynchronous generator, which generates dictionaries with partial inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: if mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: - in case of first method call, `lazy_init` argument is False - and wait time for server and model being ready exceeds `init_timeout_s` - or inference time exceeds `timeout_s`. - PyTritonClientModelDoesntSupportBatchingError: if model doesn't support batching. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientInferenceServerError: If error occurred on inference callable or Triton Inference Server side. - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - _LOGGER.debug(f"Running inference for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - - model_supports_batching = model_config.max_batch_size > 0 - if not model_supports_batching: - _LOGGER.error(f"Model {model_config.model_name} doesn't support batching") - raise PyTritonClientModelDoesntSupportBatchingError( - f"Model {model_config.model_name} doesn't support batching - use infer_sample method instead" - ) - - _LOGGER.debug(f"Running _infer for {self._model_name}") - result = self._infer(inputs or named_inputs, parameters, headers) - _LOGGER.debug(f"_infer for {self._model_name} finished") - async for item in result: - yield item - - async def _execute_infer(self, model_config, inputs_wrapped, outputs_wrapped, parameters, headers) -> Any: - # stream_infer siletly consumes all errors raised inside async_request_iterator and raises CancelledError - error_raised_inside_async_request_iterator = set() - try: - _LOGGER.debug(f"Sending InferRequest for {self._model_name}") - kwargs = self._get_infer_extra_args() - - async def async_request_iterator(errors): - _LOGGER.debug(f"Begin creating InferRequestHeader for {self._model_name}") - try: - yield { - "model_name": self._model_name, - "inputs": inputs_wrapped, - "outputs": outputs_wrapped, - "request_id": self._next_request_id, - "sequence_id": 0, - "sequence_start": True, - "sequence_end": True, - } - except Exception as e: - _LOGGER.error(f"Error occurred while creating InferRequestHeader for {self._model_name}") - errors.add(e) - raise e - _LOGGER.debug(f"End creating InferRequestHeader for {self._model_name}") - - response_iterator = self._infer_client.stream_infer( - inputs_iterator=async_request_iterator(error_raised_inside_async_request_iterator), - headers=headers, - **kwargs, - ) - _LOGGER.debug(f"End preparing InferRequest for {self._model_name}") - while True: 
- try: - try: - response = await asyncio.wait_for( - response_iterator.__anext__(), - self._inference_timeout_s, - ) - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {self._model_name} to return next response {self._inference_timeout_s}s" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - result, error = response - _LOGGER.debug(f"Received InferResponse for {self._model_name}") - if error is not None: - raise error - else: - partial_output = { - output_spec.name: result.as_numpy(output_spec.name) for output_spec in model_config.outputs - } - yield partial_output - except StopAsyncIteration: - break - _LOGGER.debug(f"End receiving InferResponse for {self._model_name}") - - except asyncio.exceptions.TimeoutError as e: - # HTTP aio client raises asyncio.exceptions.TimeoutError for timeout errors - message = f"Timeout exceeded while running inference for {self._model_name}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - except tritonclient.utils.InferenceServerException as e: - message = f"Error occurred on Triton Inference Server side:\n {e.message()}" - _LOGGER.error(message) - if "Deadline Exceeded" in e.message(): - # GRPC aio client raises InferenceServerException with message "Deadline Exceeded" - # for timeout errors - raise PyTritonClientTimeoutError(message) from e - else: - raise PyTritonClientInferenceServerError(message) from e - except asyncio.exceptions.CancelledError as e: - _LOGGER.error(f"CancelledError occurred while streaming inference for {self._model_name}") - # stream_infer siletly consumes all errors raised inside async_request_iterator and raises CancelledError - if len(error_raised_inside_async_request_iterator) > 0: - _LOGGER.error(f"Re-raising error raised inside async_request_iterator for {self._model_name} ") - raise error_raised_inside_async_request_iterator.pop() from None - else: - raise e - - async def _infer(self, inputs: _IOType, parameters, headers): - if self._model_ready: - _LOGGER.debug(f"Waiting for model {self._model_name} config") - await self._wait_and_init_model_config(self._init_timeout_s) - _LOGGER.debug(f"Model wait finished for {self._model_name}") - - _LOGGER.debug(f"Obtaining config for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - if not model_config.decoupled: - raise PyTritonClientInferenceServerError("Model config is coupled. Use AsyncioModelClient instead.") - - if isinstance(inputs, Tuple): - inputs = {input_spec.name: input_data for input_spec, input_data in zip(model_config.inputs, inputs)} - - inputs_wrapped = [] - for input_name, input_data in inputs.items(): - if isinstance(input_data, np.ndarray): - self._validate_input(input_name, input_data) - triton_dtype = tritonclient.utils.np_to_triton_dtype(input_data.dtype) - infer_input = self._triton_client_lib.InferInput(input_name, input_data.shape, triton_dtype) - infer_input.set_data_from_numpy(input_data) - input_wrapped = infer_input - inputs_wrapped.append(input_wrapped) - else: - raise PyTritonClientValueError( - f"Input {input_name} is not a numpy array. Got {type(input_data)} instead." 
- ) - - outputs_wrapped = [ - self._triton_client_lib.InferRequestedOutput(output_spec.name) for output_spec in model_config.outputs - ] - result = self._execute_infer(model_config, inputs_wrapped, outputs_wrapped, parameters, headers) - async for item in result: - yield item - - def _get_infer_extra_args(self): - if self._triton_url.scheme == "http": - raise PyTritonClientValueError("AsyncioDecoupledModelClient is only supported for grpc protocol") - warnings.warn( - f"tritonclient.aio.grpc doesn't support client_timeout parameter {self._inference_timeout_s} for infer_stream", - NotSupportedTimeoutWarning, - stacklevel=1, - ) - return {} - - -@contextlib.contextmanager -def _hub_context(): - hub = gevent.get_hub() - try: - yield hub - finally: - hub.destroy() - - -_INIT = "init" -_WAIT_FOR_MODEL = "wait_for_model" -_MODEL_CONFIG = "model_config" -_INFER_BATCH = "infer_batch" -_INFER_SAMPLE = "infer_sample" -_CLOSE = "close" - - -class FuturesModelClient: - """A client for interacting with a model deployed on the Triton Inference Server using concurrent.futures. - - This client allows asynchronous inference requests using a thread pool executor. It can be used to perform inference - on a model by providing input data and receiving the corresponding output data. The client can be used in a `with` - statement to ensure proper resource management. - - Example usage with context manager: - - ```python - with FuturesModelClient("localhost", "MyModel") as client: - result_future = client.infer_sample(input1=input1_data, input2=input2_data) - # do something else - print(result_future.result()) - ``` - - Usage without context manager: - - ```python - client = FuturesModelClient("localhost", "MyModel") - result_future = client.infer_sample(input1=input1_data, input2=input2_data) - # do something else - print(result_future.result()) - client.close() - ``` - """ - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - max_workers: int = 128, - max_queue_size: int = 128, - non_blocking: bool = False, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - ): - """Initializes the FuturesModelClient for a given model. - - Args: - url: The Triton Inference Server url, e.g. `grpc://localhost:8001`. - model_name: The name of the model to interact with. - model_version: The version of the model to interact with. If None, the latest version will be used. - max_workers: The maximum number of threads that can be used to execute the given calls. If None, there is no limit on the number of threads. - max_queue_size: The maximum number of requests that can be queued. If None, there is no limit on the number of requests. - non_blocking: If True, the client will raise a PyTritonClientQueueFullError if the queue is full. If False, the client will block until the queue is not full. - init_timeout_s: Timeout in seconds for server and model being ready. If not passed, a default 60 seconds timeout will be used. - inference_timeout_s: Timeout in seconds for the single model inference request. If not passed, a default 60 seconds timeout will be used. 
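To illustrate the `non_blocking` flag described above, a sketch under the assumption of a model with a single input named `input_1` (all names and data below are placeholders):

```python
import numpy as np

from pytriton.client import FuturesModelClient
from pytriton.client.exceptions import PyTritonClientQueueFullError

input_data = np.zeros((1, 10), dtype=np.float32)  # placeholder input

# With non_blocking=True, submitting a request while the internal queue is
# full raises PyTritonClientQueueFullError instead of blocking the caller.
with FuturesModelClient(
    "localhost", "MyModel", max_workers=2, max_queue_size=2, non_blocking=True
) as client:
    try:
        future = client.infer_sample(input_1=input_data)
        print(future.result())
    except PyTritonClientQueueFullError:
        print("Request queue is full - retry later or apply backpressure")
```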
- """ - self._url = url - self._model_name = model_name - self._model_version = model_version - self._threads = [] - self._max_workers = max_workers - self._max_queue_size = max_queue_size - self._non_blocking = non_blocking - - if self._max_workers is not None and self._max_workers <= 0: - raise ValueError("max_workers must be greater than 0") - if self._max_queue_size is not None and self._max_queue_size <= 0: - raise ValueError("max_queue_size must be greater than 0") - - kwargs = {} - if self._max_queue_size is not None: - kwargs["maxsize"] = self._max_queue_size - self._queue = Queue(**kwargs) - self._queue.put((_INIT, None, None)) - self._init_timeout_s = _DEFAULT_FUTURES_INIT_TIMEOUT_S if init_timeout_s is None else init_timeout_s - self._inference_timeout_s = inference_timeout_s - self._closed = False - self._lock = Lock() - self._existing_client = None - - def __enter__(self): - """Create context for using FuturesModelClient as a context manager.""" - return self - - def __exit__(self, exc_type, exc_value, traceback): - """Close resources used by FuturesModelClient instance when exiting from the context.""" - self.close() - - def close(self, wait=True): - """Close resources used by FuturesModelClient. - - This method closes the resources used by the FuturesModelClient instance, including the Triton Inference Server connections. - Once this method is called, the FuturesModelClient instance should not be used again. - - Args: - wait: If True, then shutdown will not return until all running futures have finished executing. - """ - if self._closed: - return - _LOGGER.debug("Closing FuturesModelClient.") - - self._closed = True - for _ in range(len(self._threads)): - self._queue.put((_CLOSE, None, None)) - - if wait: - _LOGGER.debug("Waiting for futures to finish.") - for thread in self._threads: - thread.join() - - def wait_for_model(self, timeout_s: float) -> Future: - """Returns a Future object whose result will be None when the model is ready. - - Typical usage: - - ```python - with FuturesModelClient("localhost", "BERT") as client: - future = client.wait_for_model(300.) - # do something else - future.result() # wait rest of timeout_s time - # till return None if model is ready - # or raise PyTritonClientTimeoutError - ``` - - Args: - timeout_s: The maximum amount of time to wait for the model to be ready, in seconds. - - Returns: - A Future object whose result is None when the model is ready. - """ - return self._execute( - name=_WAIT_FOR_MODEL, - request=timeout_s, - ) - - def model_config(self) -> Future: - """Obtain the configuration of the model deployed on the Triton Inference Server. - - This method returns a Future object that will contain the TritonModelConfig object when it is ready. - Client will wait init_timeout_s for the server to get into readiness state before obtaining the model configuration. - - Returns: - A Future object that will contain the TritonModelConfig object when it is ready. - - Raises: - PyTritonClientClosedError: If the FuturesModelClient is closed. - """ - return self._execute(name=_MODEL_CONFIG) - - def infer_sample( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Future: - """Run asynchronous inference on a single data sample and return a Future object. - - This method allows the user to perform inference on a single data sample by providing input data and receiving the corresponding output data. 
The method returns a Future object that wraps a dictionary of inference results, where dictionary keys are output names. - - Example usage: - - ```python - with FuturesModelClient("localhost", "BERT") as client: - result_future = client.infer_sample(input1=input1_data, input2=input2_data) - # do something else - print(result_future.result()) - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - future = client.infer_sample(input1, input2) - future = client.infer_sample(a=input1, b=input2) - ``` - - Args: - *inputs: Inference inputs provided as positional arguments. - parameters: Optional dictionary of inference parameters. - headers: Optional dictionary of HTTP headers for the inference request. - **named_inputs: Inference inputs provided as named arguments. - - Returns: - A Future object wrapping a dictionary of inference results, where dictionary keys are output names. - - Raises: - PyTritonClientClosedError: If the FuturesModelClient is closed. - """ - return self._execute( - name=_INFER_SAMPLE, - request=(inputs, parameters, headers, named_inputs), - ) - - def infer_batch( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Future: - """Run asynchronous inference on batched data and return a Future object. - - This method allows the user to perform inference on batched data by providing input data and receiving the corresponding output data. - The method returns a Future object that wraps a dictionary of inference results, where dictionary keys are output names. - - Example usage: - - ```python - with FuturesModelClient("localhost", "BERT") as client: - future = client.infer_batch(input1_sample, input2_sample) - # do something else - print(future.result()) - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - future = client.infer_batch(input1, input2) - future = client.infer_batch(a=input1, b=input2) - ``` - - Mixing of argument passing conventions is not supported and will raise PyTritonClientValueError. - - Args: - *inputs: Inference inputs provided as positional arguments. - parameters: Optional dictionary of inference parameters. - headers: Optional dictionary of HTTP headers for the inference request. - **named_inputs: Inference inputs provided as named arguments. - - Returns: - A Future object wrapping a dictionary of inference results, where dictionary keys are output names. - - Raises: - PyTritonClientClosedError: If the FuturesModelClient is closed. 
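Because each call returns a standard `concurrent.futures.Future`, several batches can be submitted and collected as they complete; a sketch assuming a model with a single input named `input_1` (names and data are placeholders):

```python
import numpy as np
from concurrent.futures import as_completed

from pytriton.client import FuturesModelClient

batches = [np.random.rand(4, 10).astype(np.float32) for _ in range(3)]  # placeholder data

with FuturesModelClient("localhost", "MyModel") as client:
    # Submit all batches first, then consume the results as they finish.
    futures = [client.infer_batch(input_1=batch) for batch in batches]
    for future in as_completed(futures):
        result_dict = future.result()  # dict mapping output name to numpy array
        print({name: array.shape for name, array in result_dict.items()})
```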
- """ - return self._execute(name=_INFER_BATCH, request=(inputs, parameters, headers, named_inputs)) - - def _execute(self, name, request=None): - if self._closed: - raise PyTritonClientClosedError("FutureModelClient is already closed") - self._extend_thread_pool() - future = Future() - if self._non_blocking: - try: - self._queue.put_nowait((future, request, name)) - except Full as e: - raise PyTritonClientQueueFullError("Queue is full") from e - else: - kwargs = {} - if self._inference_timeout_s is not None: - kwargs["timeout"] = self._inference_timeout_s - try: - self._queue.put((future, request, name), **kwargs) - except Full as e: - raise PyTritonClientQueueFullError("Queue is full") from e - return future - - def _extend_thread_pool(self): - if self._closed: - return - - with self._lock: - if not self._queue.empty() and (self._max_workers is None or len(self._threads) < self._max_workers): - _LOGGER.debug("Create new thread") - thread = Thread(target=self._worker) - self._threads.append(thread) - thread.start() - else: - _LOGGER.debug("No need to create new thread") - - def _client_request_executor(self, client, request, name): - _LOGGER.debug(f"Running {name} for {self._model_name}") - if name == _INFER_SAMPLE: - inputs, parameters, headers, named_inputs = request - result = client.infer_sample( - *inputs, - parameters=parameters, - headers=headers, - **named_inputs, - ) - elif name == _INFER_BATCH: - inputs, parameters, headers, named_inputs = request - result = client.infer_batch( - *inputs, - parameters=parameters, - headers=headers, - **named_inputs, - ) - elif name == _MODEL_CONFIG: - result = client.model_config - elif name == _WAIT_FOR_MODEL: - timeout_s = request - result = client.wait_for_model(timeout_s) - else: - raise PyTritonClientValueError(f"Unknown request name {name}") - self._set_existing_client(client) - return result - - def _create_client(self, lazy_init): - _LOGGER.debug(f"Creating ModelClient lazy_init={lazy_init}") - return ModelClient( - self._url, - self._model_name, - self._model_version, - lazy_init=lazy_init, - init_timeout_s=self._init_timeout_s, - inference_timeout_s=self._inference_timeout_s, - ) - - def _set_existing_client(self, client): - if client._model_config is not None: - with self._lock: - if self._existing_client is None: - _LOGGER.debug("Setting existing client") - self._existing_client = client - - def _remove_existing_client(self, client): - if client is not None: - with self._lock: - if self._existing_client is not None: - if self._existing_client is client: - _LOGGER.debug("Resetting existing client") - self._existing_client = None - - def _worker(self): - _LOGGER.debug("Starting worker thread") - client = None - # Work around for AttributeError: '_Threadlocal' object has no attribute 'hub' - # gevent/_hub_local.py", line 77, in gevent._gevent_c_hub_local.get_hub_noargs - with _hub_context(): - while True: - future, request, name = self._queue.get() - if future == _CLOSE: - _LOGGER.debug("Closing thread") - self._queue.task_done() - break - if future == _INIT: - with self._lock: - if self._existing_client is None: - try: - _LOGGER.debug("Initial client creation") - client = self._create_client(False) - _LOGGER.debug("Setting existing client") - self._existing_client = client - except Exception as e: - _LOGGER.warning(f"Error {e} occurred during init for {self._model_name}") - continue - try: - if client is None: - with self._lock: - if self._existing_client is not None: - _LOGGER.debug("Creating new client from existing client") - client 
= ModelClient.from_existing_client(self._existing_client) - if client is None: - _LOGGER.debug("Creating new client") - client = self._create_client(name == _WAIT_FOR_MODEL) - with client: - self._set_existing_client(client) - while True: - try: - result = self._client_request_executor(client, request, name) - _LOGGER.debug(f"Finished {name} for {self._model_name}") - future.set_result(result) - self._queue.task_done() - except Exception as e: - _LOGGER.error(f"Error {e} occurred during {name} for {self._model_name}") - future.set_exception(e) - self._queue.task_done() - break - future, request, name = self._queue.get() - if future == _CLOSE: - _LOGGER.debug("Closing thread") - self._queue.task_done() - return - except Exception as e: - _LOGGER.error(f"Error {e} occurred during {name} for {self._model_name}") - future.set_exception(e) - self._queue.task_done() - finally: - self._remove_existing_client(client) - client = None - _LOGGER.debug("Finishing worker thread") diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/exceptions.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/exceptions.py deleted file mode 100644 index 6619b4a318b7a0f00fe84d0d9b07086a662764d6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/exceptions.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Exceptions thrown in pytriton.client module.""" - - -class PyTritonClientError(Exception): - """Generic pytriton client exception.""" - - def __init__(self, message: str): - """Initialize exception with message. - - Args: - message: Error message - """ - self._message = message - - def __str__(self) -> str: - """String representation of error. - - Returns: - Message content - """ - return self._message - - @property - def message(self): - """Get the exception message. - - Returns: - The message associated with this exception, or None if no message. 
- - """ - return self._message - - -class PyTritonClientValueError(PyTritonClientError): - """Generic error raised in case of incorrect values are provided into API.""" - - pass - - -class PyTritonClientInvalidUrlError(PyTritonClientValueError): - """Error raised when provided Triton Inference Server url is invalid.""" - - pass - - -class PyTritonClientTimeoutError(PyTritonClientError): - """Timeout occurred during communication with the Triton Inference Server.""" - - pass - - -class PyTritonClientModelUnavailableError(PyTritonClientError): - """Model with given name and version is unavailable on the given Triton Inference Server.""" - - pass - - -class PyTritonClientClosedError(PyTritonClientError): - """Error raised in case of trying to use closed client.""" - - pass - - -class PyTritonClientModelDoesntSupportBatchingError(PyTritonClientError): - """Error raised in case of trying to infer batch on model not supporting batching.""" - - pass - - -class PyTritonClientInferenceServerError(PyTritonClientError): - """Error raised in case of error on inference callable or Triton Inference Server side.""" - - pass - - -class PyTritonClientQueueFullError(PyTritonClientError): - """Error raised in case of trying to push request to full queue.""" - - pass diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/utils.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/utils.py deleted file mode 100644 index 2077754df6c25f2f9cb0f668d6be9cfb15e2a3f6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/utils.py +++ /dev/null @@ -1,384 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Utility module supporting model clients.""" - -import dataclasses -import enum -import logging -import socket -import sys -import time -import urllib -import warnings -from typing import Optional, Union - -import tritonclient.grpc -import tritonclient.http -import tritonclient.http.aio -from grpc import RpcError -from tritonclient.utils import InferenceServerException - -from pytriton.client.exceptions import PyTritonClientInvalidUrlError, PyTritonClientTimeoutError -from pytriton.client.warnings import NotSupportedTimeoutWarning -from pytriton.constants import DEFAULT_GRPC_PORT, DEFAULT_HTTP_PORT -from pytriton.model_config.parser import ModelConfigParser - -_LOGGER = logging.getLogger(__name__) - -_TritonSyncClientType = Union[tritonclient.grpc.InferenceServerClient, tritonclient.http.InferenceServerClient] - -_DEFAULT_NETWORK_TIMEOUT_S = 60.0 # 1min -_DEFAULT_WAIT_FOR_SERVER_READY_TIMEOUT_S = 60.0 # 1min -_DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S = 300.0 # 5min - -LATEST_MODEL_VERSION = "" - - -# Special value for model_version argument. If model_version is None, the latest version of the model is returned. - - -class ModelState(enum.Enum): - """Describe model state in Triton. 
- - Attributes: - LOADING: Loading of model - UNLOADING: Unloading of model - UNAVAILABLE: Model is missing or could not be loaded - READY: Model is ready for inference - """ - - LOADING = "LOADING" - UNLOADING = "UNLOADING" - UNAVAILABLE = "UNAVAILABLE" - READY = "READY" - - -def parse_http_response(models): - """Parse model repository index response from Triton Inference Server for HTTP.""" - models_states = {} - _LOGGER.debug("Parsing model repository index entries:") - for model in models: - _LOGGER.debug(f" name={model.get('name')} version={model.get('version')} state={model.get('state')}") - if not model.get("version"): - continue - - model_state = ModelState(model["state"]) if model.get("state") else ModelState.LOADING - models_states[(model["name"], model["version"])] = model_state - - return models_states - - -def parse_grpc_response(models): - """Parse model repository index response from Triton Inference Server for GRCP.""" - models_states = {} - _LOGGER.debug("Parsing model repository index entries:") - for model in models: - _LOGGER.debug(f" name={model.name} version={model.version} state={model.state}") - if not model.version: - continue - - model_state = ModelState(model.state) if model.state else ModelState.LOADING - models_states[(model.name, model.version)] = model_state - - return models_states - - -def get_model_state( - client: _TritonSyncClientType, - model_name: str, - model_version: Optional[str] = None, -) -> ModelState: - """Obtains state of the model deployed in Triton Inference Server. - - Args: - client: Triton Inference Server client to use for communication - model_name: name of the model which state we're requesting. - model_version: - version of the model which state we're requesting. - If model_version is None state of latest model is returned. - The latest versions of the model are the numerically greatest version numbers. - - Returns: - Model state. _ModelState.UNAVAILABLE is returned in case if model with given name and version is not found. - - """ - repository_index = client.get_model_repository_index() - if isinstance(repository_index, list): - models_states = parse_http_response(models=repository_index) - else: - models_states = parse_grpc_response(models=repository_index.models) - - if model_version is None: - requested_model_states = { - version: state for (name, version), state in models_states.items() if name == model_name - } - if not requested_model_states: - return ModelState.UNAVAILABLE - else: - requested_model_states = sorted(requested_model_states.items(), key=lambda item: int(item[0])) - _latest_version, latest_version_state = requested_model_states[-1] - return latest_version_state - else: - state = models_states.get((model_name, model_version), ModelState.UNAVAILABLE) - return state - - -def get_model_config( - client: _TritonSyncClientType, - model_name: str, - model_version: Optional[str] = None, - timeout_s: Optional[float] = None, -): - """Obtain configuration of model deployed on the Triton Inference Server. - - Function waits for server readiness. - - Typical use: - - client = tritonclient.grpc.Client("localhost:8001") - model_config = get_model_config(client, "MyModel", "1", 60.0) - model_config = get_model_config(client, "MyModel") - - Args: - client: Triton Inference Server client to use for communication - model_name: name of the model which configuration we're requesting. - model_version: - version of the model which configuration we're requesting. - If model_version is None configuration of the latest model is returned. 
- The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to finish model configuration obtain. Default value is 300.0 s. - - Returns: - Configuration of requested model. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - """ - wait_for_model_ready(client, model_name=model_name, model_version=model_version, timeout_s=timeout_s) - - model_version = model_version or "" - - _LOGGER.debug(f"Obtaining model {model_name} config") - if isinstance(client, tritonclient.grpc.InferenceServerClient): - response = client.get_model_config(model_name, model_version, as_json=True) - model_config = response["config"] - else: - model_config = client.get_model_config(model_name, model_version) - model_config = ModelConfigParser.from_dict(model_config) - _LOGGER.debug(f"Model config: {model_config}") - return model_config - - -def _warn_on_too_big_network_timeout(client: _TritonSyncClientType, timeout_s: float): - if isinstance(client, tritonclient.http.InferenceServerClient): - connection_pool = client._client_stub._connection_pool - network_reldiff_s = (connection_pool.network_timeout - timeout_s) / timeout_s - connection_reldiff_s = (connection_pool.connection_timeout - timeout_s) / timeout_s - rtol = 0.001 - if network_reldiff_s > rtol or connection_reldiff_s > rtol: - warnings.warn( - "Client network and/or connection timeout is smaller than requested timeout_s. This may cause unexpected behavior. " - f"network_timeout={connection_pool.network_timeout} " - f"connection_timeout={connection_pool.connection_timeout} " - f"timeout_s={timeout_s}", - NotSupportedTimeoutWarning, - stacklevel=1, - ) - - -def wait_for_server_ready( - client: _TritonSyncClientType, - timeout_s: Optional[float] = None, -): - """Waits for Triton Inference Server to be ready. - - Typical use: - - client = tritonclient.http.Client("localhost:8001") - wait_for_server_ready(client, timeout_s=600.0) - - Args: - client: Triton Inference Server client to use for communication - timeout_s: timeout to server get into readiness state. Default value is 60.0 s. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. 
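A slightly fuller, hedged variant of the typical use shown above, assuming a Triton instance reachable locally; note that tritonclient.http exposes the client class as InferenceServerClient, 8000 is the default HTTP port (8001 is the default gRPC port), and "MyModel" is a placeholder model name:

import tritonclient.http

from pytriton.client.utils import get_model_config, wait_for_server_ready

# Hypothetical local HTTP endpoint and model name.
client = tritonclient.http.InferenceServerClient("localhost:8000")
wait_for_server_ready(client, timeout_s=60.0)
model_config = get_model_config(client, "MyModel", timeout_s=300.0)
print(model_config.model_name, model_config.max_batch_size)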
- """ - timeout_s = timeout_s if timeout_s is not None else _DEFAULT_WAIT_FOR_SERVER_READY_TIMEOUT_S - should_finish_before_s = time.time() + timeout_s - _warn_on_too_big_network_timeout(client, timeout_s) - - def _is_server_ready(): - try: - return client.is_server_ready() and client.is_server_live() - except InferenceServerException: - return False - except (RpcError, ConnectionError, socket.gaierror): # GRPC and HTTP clients raises these errors - return False - except Exception as e: - _LOGGER.exception(f"Exception while checking server readiness: {e}") - raise e - - timeout_s = max(0.0, should_finish_before_s - time.time()) - _LOGGER.debug(f"Waiting for server to be ready (timeout={timeout_s})") - is_server_ready = _is_server_ready() - while not is_server_ready: - time.sleep(min(1.0, timeout_s)) - is_server_ready = _is_server_ready() - if not is_server_ready and time.time() >= should_finish_before_s: - raise PyTritonClientTimeoutError("Waiting for server to be ready timed out.") - - -def wait_for_model_ready( - client: _TritonSyncClientType, - model_name: str, - model_version: Optional[str] = None, - timeout_s: Optional[float] = None, -): - """Wait for Triton Inference Server to be ready. - - Args: - client: Triton Inference Server client to use for communication. - model_name: name of the model to wait for readiness. - model_version: - version of the model to wait for readiness. - If model_version is None waiting for latest version of the model. - The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to server and model get into readiness state. Default value is 300.0 s. - - Raises: - PyTritonClientTimeoutError: If server readiness didn't finish before given timeout. - """ - model_version = model_version or "" - model_version_msg = model_version or LATEST_MODEL_VERSION - timeout_s = timeout_s if timeout_s is not None else _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S - should_finish_before_s = time.time() + timeout_s - - wait_for_server_ready(client, timeout_s=timeout_s) - timeout_s = max(0.0, should_finish_before_s - time.time()) - _LOGGER.debug(f"Waiting for model {model_name}/{model_version_msg} to be ready (timeout={timeout_s})") - is_model_ready = client.is_model_ready(model_name, model_version) - while not is_model_ready: - time.sleep(min(1.0, timeout_s)) - is_model_ready = client.is_model_ready(model_name, model_version) - - if not is_model_ready and time.time() >= should_finish_before_s: - raise PyTritonClientTimeoutError( - f"Waiting for model {model_name}/{model_version_msg} to be ready timed out." - ) - - -def create_client_from_url(url: str, network_timeout_s: Optional[float] = None) -> _TritonSyncClientType: # type: ignore - """Create Triton Inference Server client. - - Args: - url: url of the server to connect to. - If url doesn't contain scheme (e.g. "localhost:8001") http scheme is added. - If url doesn't contain port (e.g. "localhost") default port for given scheme is added. - network_timeout_s: timeout for client commands. Default value is 60.0 s. - - Returns: - Triton Inference Server client. - - Raises: - PyTritonClientInvalidUrlError: If provided Triton Inference Server url is invalid. 
- """ - url = TritonUrl.from_url(url) - triton_client_lib = {"grpc": tritonclient.grpc, "http": tritonclient.http}[url.scheme] - - if url.scheme == "grpc": - # by default grpc client has very large number of timeout, thus we want to make it equal to http client timeout - network_timeout_s = _DEFAULT_NETWORK_TIMEOUT_S if network_timeout_s is None else network_timeout_s - warnings.warn( - f"tritonclient.grpc doesn't support timeout for other commands than infer. Ignoring network_timeout: {network_timeout_s}.", - NotSupportedTimeoutWarning, - stacklevel=1, - ) - - triton_client_init_kwargs = {} - if network_timeout_s is not None: - triton_client_init_kwargs.update( - **{ - "grpc": {}, - "http": {"connection_timeout": network_timeout_s, "network_timeout": network_timeout_s}, - }[url.scheme] - ) - - _LOGGER.debug(f"Creating InferenceServerClient for {url.with_scheme} with {triton_client_init_kwargs}") - return triton_client_lib.InferenceServerClient(url.without_scheme, **triton_client_init_kwargs) - - -@dataclasses.dataclass -class TritonUrl: - """TritonUrl class for parsing Triton Inference Server url. - - Attributes: - scheme: scheme of the url (http or grpc) - hostname: hostname of the url - port: port of the url - - Examples: - triton_url = TritonUrl.from_url("localhost:8000") - triton_url.with_scheme - >>> "http://localhost:8000" - triton_url.without_scheme - >>> "localhost:8000" - triton_url.scheme, triton_url.hostname, triton_url.port - >>> ("http", "localhost", 8000) - """ - - scheme: str - hostname: str - port: int - - @classmethod - def from_url(cls, url): - """Parse triton url and create TritonUrl instance. - - Returns: - TritonUrl object with scheme, hostname and port. - """ - if not isinstance(url, str): - raise PyTritonClientInvalidUrlError(f"Invalid url {url}. Url must be a string.") - try: - parsed_url = urllib.parse.urlparse(url) - # change in py3.9+ - # https://github.com/python/cpython/commit/5a88d50ff013a64fbdb25b877c87644a9034c969 - if sys.version_info < (3, 9) and not parsed_url.scheme and "://" in parsed_url.path: - raise ValueError(f"Invalid url {url}. Only grpc and http are supported.") - if (not parsed_url.scheme and "://" not in parsed_url.path) or ( - sys.version_info >= (3, 9) and parsed_url.scheme and not parsed_url.netloc - ): - _LOGGER.debug(f"Adding http scheme to {url}") - parsed_url = urllib.parse.urlparse(f"http://{url}") - - scheme = parsed_url.scheme.lower() - if scheme not in ["grpc", "http"]: - raise ValueError(f"Invalid scheme {scheme}. Only grpc and http are supported.") - - port = parsed_url.port or {"grpc": DEFAULT_GRPC_PORT, "http": DEFAULT_HTTP_PORT}[scheme] - except ValueError as e: - raise PyTritonClientInvalidUrlError(f"Invalid url {url}") from e - return cls(scheme, parsed_url.hostname, port) - - @property - def with_scheme(self): - """Get Triton Inference Server url with scheme.""" - return f"{self.scheme}://{self.hostname}:{self.port}" - - @property - def without_scheme(self): - """Get Triton Inference Server url without scheme.""" - return f"{self.hostname}:{self.port}" diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/warnings.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/warnings.py deleted file mode 100644 index 7e121689e5d311f747037c530c290d1606839cbe..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/client/warnings.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Warnings for pytriton module.""" - - -class PyTritonWarning(UserWarning): - """Base warning for pytriton module.""" - - pass - - -class NotSupportedTimeoutWarning(PyTritonWarning): - """A warning for client, which doesn't support timeout configuration.""" - - pass diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/constants.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/constants.py deleted file mode 100644 index 49f8723c8cfc789af49edd1e712c175220361946..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/constants.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 -"""Constants for pytriton.""" - -import os -import pathlib - -DEFAULT_HTTP_PORT = 8000 -DEFAULT_GRPC_PORT = 8001 -DEFAULT_METRICS_PORT = 8002 -TRITON_LOCAL_IP = "127.0.0.1" -TRITON_CONTEXT_FIELD_NAME = "triton_context" -TRITON_PYTHON_BACKEND_INTERPRETER_DIRNAME = "python_backend_interpreter" -DEFAULT_TRITON_STARTUP_TIMEOUT_S = 120 -CREATE_TRITON_CLIENT_TIMEOUT_S = 10 - -__DEFAULT_PYTRITON_HOME = os.path.join(os.getenv("XDG_CACHE_HOME", "$HOME/.cache"), "pytriton") -__PYTRITON_HOME = os.path.expanduser(os.path.expandvars(os.getenv("PYTRITON_HOME", __DEFAULT_PYTRITON_HOME))) -PYTRITON_HOME = pathlib.Path(__PYTRITON_HOME).resolve().absolute() diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/decorators.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/decorators.py deleted file mode 100644 index b8f8603f62c9d5b783bf65366cb0d5256d573598..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/decorators.py +++ /dev/null @@ -1,678 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Inference callable decorators.""" - -import collections -import dataclasses -import inspect -import itertools -import operator -import typing -from bisect import bisect_left -from collections.abc import MutableMapping -from typing import Callable, Dict, List, NamedTuple, Optional, Tuple, Union - -import numpy as np -import wrapt - -from pytriton.constants import TRITON_CONTEXT_FIELD_NAME -from pytriton.exceptions import PyTritonBadParameterError, PyTritonRuntimeError, PyTritonValidationError -from pytriton.model_config.triton_model_config import TritonModelConfig -from pytriton.proxy.data import _serialize_byte_tensor -from pytriton.proxy.telemetry import start_span_from_span - - -class _WrappedWithWrapper(NamedTuple): - wrapped: Optional[Callable] - wrapper: Optional[Callable] - - -InputNames = typing.List[str] -InferenceRequest = typing.Dict[str, np.ndarray] -InferenceRequests = typing.Union[typing.List[InferenceRequest], typing.Tuple[InferenceRequest, ...]] -InferenceResult = typing.Dict[str, np.ndarray] -InferenceResults = typing.Union[typing.List[InferenceResult], typing.Tuple[InferenceResult, ...]] - - -def get_inference_request_batch_size(inference_request: InferenceRequest) -> int: - """Get batch size from triton request. - - Args: - inference_request (InferenceRequest): Triton request. - - Returns: - int: Batch size. - """ - first_input_value = next(iter(inference_request.values())) - batch_size, *_dims = first_input_value.shape - return batch_size - - -def _get_wrapt_stack(wrapped) -> List[_WrappedWithWrapper]: - """Returns stack of wrapped functions with wrappers applied to inference callable.""" - stack = [] - infer_callable = wrapped - while infer_callable is not None: - stack.append(_WrappedWithWrapper(infer_callable, getattr(infer_callable, "_self_wrapper", None))) - infer_callable = getattr(infer_callable, "__wrapped__", None) - - return stack - - -class ModelConfigDict(MutableMapping): - """Dictionary for storing model configs for inference callable.""" - - def __init__(self): - """Create ModelConfigDict object.""" - self._data: Dict[str, TritonModelConfig] = {} - self._keys: List[Callable] = [] - - def __getitem__(self, infer_callable: Callable) -> TritonModelConfig: - """Get model config for inference callable.""" - key = self._get_model_config_key(infer_callable) - return self._data[key] - - def __setitem__(self, infer_callable: Callable, item: TritonModelConfig): - """Set model config for inference callable.""" - self._keys.append(infer_callable) - key = self._get_model_config_key(infer_callable) - self._data[key] = item - - def __delitem__(self, infer_callable: Callable): - """Delete model config for inference callable.""" - key = self._get_model_config_key(infer_callable) - del self._data[key] - - def __len__(self): - """Get number of inference callable keys.""" - return len(self._data) - - def __iter__(self): - """Iterate over inference callable keys.""" - return iter(self._keys) - - @staticmethod - def _get_model_config_key(infer_callable: Callable) -> str: - """Prepares TritonModelConfig dictionary key for function/callable.""" - dict_key = infer_callable - if inspect.ismethod(dict_key) and dict_key.__name__ == "__call__": - dict_key = dict_key.__self__ - return str(dict_key) - - -@dataclasses.dataclass -class TritonContext: - """Triton context definition class.""" - - model_configs: ModelConfigDict = dataclasses.field(default_factory=ModelConfigDict) - - -def get_triton_context(wrapped, instance) -> TritonContext: - """Retrieves triton context from callable. 
- - It is used in @triton_context to get triton context registered by triton binding in inference callable. - If you use @triton_context decorator you do not need this function. - """ - caller = instance or wrapped - if not hasattr(caller, "__triton_context__"): - raise PyTritonValidationError("Wrapped function or object must bound with triton to get __triton_context__") - return caller.__triton_context__ - - -def get_model_config(wrapped, instance) -> TritonModelConfig: - """Retrieves instance of TritonModelConfig from callable. - - It is internally used in convert_output function to get output list from model. - You can use this in custom decorators if you need access to model_config information. - If you use @triton_context decorator you do not need this function (you can get model_config directly - from triton_context passing function/callable to dictionary getter). - """ - return get_triton_context(wrapped, instance).model_configs[wrapped] - - -def convert_output( - outputs: Union[Dict, List, Tuple], wrapped=None, instance=None, model_config: Optional[TritonModelConfig] = None -): - """Converts output from tuple ot list to dictionary. - - It is utility function useful for mapping output list into dictionary of outputs. - Currently, it is used in @sample and @batch decorators (we assume that user can return list or tuple of outputs - instead of dictionary if this list matches output list in model config (size and order). - """ - if isinstance(outputs, dict): - return outputs - elif isinstance(outputs, (list, tuple)): - if model_config is None: - model_config = get_model_config(wrapped, instance) - if len(outputs) != len(model_config.outputs): - raise PyTritonValidationError("Outputs length different than config outputs length") - outputs = {config_output.name: output for config_output, output in zip(model_config.outputs, outputs)} - return outputs - else: - raise PyTritonValidationError(f"Unsupported output type {type(outputs)}.") - - -@wrapt.decorator -def sample(wrapped, instance, args, kwargs): - """Decorator is used for non-batched inputs to convert from one element list of requests to request kwargs. - - Decorator takes first request and convert it into named inputs. - Useful with non-batching models - instead of one element list of request, we will get named inputs - `kwargs`. - """ - kwargs.update(args[0][0]) - outputs = wrapped(*args[1:], **kwargs) - outputs = convert_output(outputs, wrapped, instance) - return [outputs] - - -@wrapt.decorator -def batch(wrapped, instance, args, kwargs): - """Decorator for converting list of request dicts to dict of input batches. - - Converts list of request dicts to dict of input batches. - It passes **kwargs to inference callable where each named input contains numpy array with batch of requests - received by Triton server. - We assume that each request has the same set of keys (you can use group_by_keys decorator before - using @batch decorator if your requests may have different set of keys). - - Raises: - PyTritonValidationError: If the requests have different set of keys. - ValueError: If the output tensors have different than expected batch sizes. Expected batch size is - calculated as a sum of batch sizes of all requests. 
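To make the contract above concrete, a minimal sketch of an inference callable wrapped with @batch; the input and output names are placeholders for whatever the bound model defines:

import numpy as np

from pytriton.decorators import batch

@batch
def infer_fn(**inputs):
    # With @batch, "INPUT_1" arrives as one numpy array whose first axis is
    # the combined batch built from all incoming requests.
    data = inputs["INPUT_1"]
    # Return a dict keyed by output names; the decorator slices it back into
    # per-request responses using each request's batch size.
    return {"OUTPUT_1": data * 2.0}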
- """ - telemetry_name = "pytriton-batch-decorator-span" - - req_list = args[0] - input_names = req_list[0].keys() - - for req_dict2 in req_list[1:]: - if input_names != req_dict2.keys(): - raise PyTritonValidationError("Cannot batch requests with different set of inputs keys") - - inputs = {} - for model_input in input_names: - concatenated_input_data = np.concatenate([req[model_input] for req in req_list]) - inputs[model_input] = concatenated_input_data - - args = args[1:] - new_kwargs = dict(kwargs) - new_kwargs.update(inputs) - spans = [start_span_from_span(request.span, telemetry_name) for request in req_list if request.span is not None] - try: - outputs = wrapped(*args, **new_kwargs) - finally: - for span in spans: - span.end() - - def _split_result(_result): - outputs = convert_output(_result, wrapped, instance) - output_names = outputs.keys() - - requests_total_batch_size = sum(get_inference_request_batch_size(req) for req in req_list) - not_matching_tensors_shapes = { - output_name: output_tensor.shape - for output_name, output_tensor in outputs.items() - if output_tensor.shape[0] != requests_total_batch_size - } - if not_matching_tensors_shapes: - raise ValueError( - f"Received output tensors with different batch sizes: {', '.join(': '.join(map(str, item)) for item in not_matching_tensors_shapes.items())}. " - f"Expected batch size: {requests_total_batch_size}. " - ) - - out_list = [] - start_idx = 0 - for request in req_list: - # get batch_size of first input for each request - assume that all inputs have same batch_size - request_batch_size = get_inference_request_batch_size(request) - req_output_dict = {} - for _output_ind, output_name in enumerate(output_names): - req_output = outputs[output_name][start_idx : start_idx + request_batch_size, ...] - req_output_dict[output_name] = req_output - out_list.append(req_output_dict) - start_idx += request_batch_size - return out_list - - if inspect.isgenerator(outputs): - return (_split_result(_result) for _result in outputs) - else: - return _split_result(outputs) - - -def group_by_values(*keys, pad_fn: typing.Optional[typing.Callable[[InferenceRequests], InferenceRequests]] = None): - """Decorator for grouping requests by values of selected keys. - - This function splits a batch into multiple sub-batches based on the specified keys values and - calls the decorated function with each sub-batch. This is particularly useful when working with models - that require dynamic parameters sent by the user. - - For example, given an input of the form: - - ```python - {"sentences": [b"Sentence1", b"Sentence2", b"Sentence3"], "param1": [1, 1, 2], "param2": [1, 1, 1]} - ``` - - Using @group_by_values("param1", "param2") will split the batch into two sub-batches: - - ```python - [ - {"sentences": [b"Sentence1", b"Sentence2"], "param1": [1, 1], "param2": [1, 1]}, - {"sentences": [b"Sentence3"], "param1": [2], "param2": [1]} - ] - ``` - - This decorator should be used after the @batch decorator. - - Example usage: - ```python - @batch - @group_by_values("param1", "param2") - def infer_fun(**inputs): - ... - return outputs - ``` - - Args: - *keys: List of keys to group by. - pad_fn: Optional function to pad the batch to the same size before merging again to a single batch. - - Returns: - The decorator function. 
- """ - - def value_to_key(value): - if isinstance(value, np.ndarray): - if value.dtype == np.object_ or value.dtype.type == np.bytes_: - return _serialize_byte_tensor(value) - else: - return value.tobytes() - return value - - def _get_sort_key_for_sample(_request, _sample_idx: int): - return tuple(value_to_key(_request[_key][_sample_idx]) for _key in keys) - - def _group_request(_request: InferenceRequest, _batch_size: int): - idx_inputs = [(sample_idx, _get_sort_key_for_sample(_request, sample_idx)) for sample_idx in range(_batch_size)] - idx_inputs.sort(key=operator.itemgetter(1)) - for _, group in itertools.groupby(idx_inputs, key=operator.itemgetter(1)): - _samples_idxes, _ = zip(*group) - grouped_request = {input_name: value[_samples_idxes, ...] for input_name, value in _request.items()} - yield _samples_idxes, grouped_request - - @wrapt.decorator - def _wrapper(wrapped, instance, args, kwargs): - wrappers_stack = [ - callable_with_wrapper.wrapper - for callable_with_wrapper in _get_wrapt_stack(wrapped) - if callable_with_wrapper.wrapper is not None - ] - if batch in wrappers_stack: - raise PyTritonRuntimeError("The @group_by_values decorator must be used after the @batch decorator.") - - request = {k: v for k, v in kwargs.items() if k not in _SPECIAL_KEYS} - other_kwargs = {k: v for k, v in kwargs.items() if k in _SPECIAL_KEYS} - - batch_size = get_inference_request_batch_size(request) - sample_indices_with_interim_result = [] - for sample_indices, _grouped_sub_request in _group_request(request, batch_size): - interim_result = wrapped(*args, **_grouped_sub_request, **other_kwargs) - sample_indices_with_interim_result.append((sample_indices, interim_result)) - - if pad_fn is not None: - indices, results = tuple(map(tuple, zip(*sample_indices_with_interim_result))) - results = pad_fn(results) - sample_indices_with_interim_result = tuple(zip(indices, results)) - - _, first_result_data = sample_indices_with_interim_result[0] - result = { - output_name: np.zeros((batch_size,) + data.shape[1:], dtype=data.dtype) - for output_name, data in first_result_data.items() - } - for indices, results in sample_indices_with_interim_result: - for output_name, data in results.items(): - result[output_name][indices, ...] = data - - return result - - return _wrapper - - -class ConstantPadder: - """Padder that pads the given batches with a constant value.""" - - def __init__(self, pad_value=0): - """Initialize the padder. - - Args: - pad_value (int, optional): Padding value. Defaults to 0. - """ - self.pad_value = pad_value - - def __call__(self, batches_list: InferenceResults) -> InferenceResults: - """Pad the given batches with the specified value to pad size enabling further batching to single arrays. - - Args: - batches_list (List[Dict[str, np.ndarray]]): List of batches to pad. - - Returns: - List[Dict[str, np.ndarray]]: List of padded batches. - - Raises: - PyTritonRuntimeError: If the input arrays for a given input name have different dtypes. 
- """ - - def _get_padded_shape(_batches: List[np.ndarray]) -> Tuple[int, ...]: - """Get the shape of the padded array without batch axis.""" - return tuple(np.max([batch.shape[1:] for batch in _batches if batch is not None], axis=0)) - - def _get_padded_dtype(_batches: List[np.ndarray]) -> np.dtype: - dtypes = [batch.dtype for batch in _batches if batch is not None] - result_dtype = dtypes[0] - - if not all(dtype.kind == result_dtype.kind for dtype in dtypes): - raise PyTritonRuntimeError("All input arrays for given input name must have the same dtype.") - - # for bytes (encoded string) or unicode string need to obtain the max length - if result_dtype.kind in "SU": - order_and_kind = result_dtype.str[:2] - max_len = max([int(dtype.str[2:]) for dtype in dtypes]) - result_dtype = f"{order_and_kind}{max_len}" - else: - if not all(dtype == result_dtype for dtype in dtypes): - raise PyTritonRuntimeError("All input arrays for given input name must have the same dtype.") - - return np.dtype(result_dtype) - - input_names = list( - collections.OrderedDict.fromkeys(input_name for batch in batches_list for input_name in batch.keys()) - ) - batches_by_name = {input_name: [batch.get(input_name) for batch in batches_list] for input_name in input_names} - for input_batches in batches_by_name.values(): - result_shape, result_dtype = _get_padded_shape(input_batches), _get_padded_dtype(input_batches) - for batch_idx, batch in enumerate(input_batches): - if batch is not None: - input_batches[batch_idx] = np.pad( - batch, - [(0, 0)] + [(0, b - a) for a, b in zip(batch.shape[1:], result_shape)], - mode="constant", - constant_values=self.pad_value if result_dtype.kind not in ["S", "U", "O"] else b"", - ).astype(result_dtype) - - return [ - {name: batches[batch_idx] for name, batches in batches_by_name.items() if batches[batch_idx] is not None} - for batch_idx in range(len(batches_list)) - ] - - -@wrapt.decorator -def group_by_keys(wrapped, instance, args, kwargs): - """Group by keys. - - Decorator prepares groups of requests with the same set of keys and calls wrapped function - for each group separately (it is convenient to use this decorator before batching, because the batching decorator - requires consistent set of inputs as it stacks them into batches). - """ - inputs = args[0] - idx_inputs = [(idx, tuple(sorted(input.keys())), input) for idx, input in enumerate(inputs)] - idx_inputs.sort(key=operator.itemgetter(1)) - idx_groups_res = [] - for _, group in itertools.groupby(idx_inputs, key=operator.itemgetter(1)): - idx, _key, sample_list = zip(*group) - args = (list(sample_list),) + args[1:] - out = wrapped(*args, **kwargs) - idx_groups_res.extend(zip(idx, out)) - - idx_groups_res.sort(key=operator.itemgetter(0)) - res_flat = [r[1] for r in idx_groups_res] - return res_flat - - -def fill_optionals(**defaults): - """This decorator ensures that any missing inputs in requests are filled with default values specified by the user. - - Default values should be NumPy arrays without batch axis. - - If you plan to group requests ex. with - [@group_by_keys][pytriton.decorators.group_by_keys] or - [@group_by_vales][pytriton.decorators.group_by_values] decorators - provide default values for optional parameters at the beginning of decorators stack. - The other decorators can then group requests into bigger batches resulting in a better model performance. - - Typical use: - ```python - @fill_optionals() - @group_by_keys() - @batch - def infer_fun(**inputs): - ... 
- return outputs - ``` - - Args: - defaults: keyword arguments containing default values for missing inputs - - - If you have default values for some optional parameter it is good idea to provide them at the very beginning, - so the other decorators (e.g. @group_by_keys) can make bigger consistent groups. - """ - - def _verify_defaults(model_config: TritonModelConfig): - inputs = {spec.name: spec for spec in model_config.inputs} - not_matching_default_names = sorted(set(defaults) - set(inputs)) - if not_matching_default_names: - raise PyTritonBadParameterError(f"Could not found {', '.join(not_matching_default_names)} inputs") - - non_numpy_items = {k: v for k, v in defaults.items() if not isinstance(v, np.ndarray)} - if non_numpy_items: - raise PyTritonBadParameterError( - f"Could not use {', '.join([f'{k}={v}' for k, v in non_numpy_items.items()])} defaults " - "as they are not NumPy arrays" - ) - - not_matching_dtypes = {k: (v.dtype, inputs[k].dtype) for k, v in defaults.items() if v.dtype != inputs[k].dtype} - if not_matching_dtypes: - non_matching_dtypes_str_list = [ - f"{name}: dtype={have_dtype} expected_dtype={expected_dtype}" - for name, (have_dtype, expected_dtype) in not_matching_dtypes.items() - ] - raise PyTritonBadParameterError( - f"Could not use {', '.join(non_matching_dtypes_str_list)} " - f"defaults as they have different than input signature dtypes" - ) - - def _shape_match(_have_shape, _expected_shape): - return len(_have_shape) == len(_expected_shape) and all( - e == -1 or h == e for h, e in zip(_have_shape, _expected_shape) - ) - - not_matching_shapes = { - k: (v.shape, inputs[k].shape) for k, v in defaults.items() if not _shape_match(v.shape, inputs[k].shape) - } - if not_matching_shapes: - non_matching_shapes_str_list = [ - f"{name}: shape={have_shape} expected_shape={expected_shape}" - for name, (have_shape, expected_shape) in not_matching_shapes.items() - ] - raise PyTritonBadParameterError( - f"Could not use {', '.join(non_matching_shapes_str_list)} " - f"defaults as they have different than input signature shapes" - ) - - @wrapt.decorator - def _wrapper(wrapped, instance, args, kwargs): - model_config = get_model_config(wrapped, instance) - _verify_defaults(model_config) - # verification if not after group wrappers is in group wrappers - - (requests,) = args - - model_supports_batching = model_config.batching - for request in requests: - batch_size = get_inference_request_batch_size(request) if model_supports_batching else None - for default_key, default_value in defaults.items(): - if default_key in request: - continue - - if model_supports_batching: - ones_reps = (1,) * default_value.ndim # repeat once default_value on each axis - axis_reps = (batch_size,) + ones_reps # ... except on batch axis. we repeat it batch_size times - default_value = np.tile(default_value, axis_reps) - - request[default_key] = default_value - return wrapped(*args, **kwargs) - - return _wrapper - - -@wrapt.decorator -def triton_context(wrapped, instance, args, kwargs): - """Adds triton context. - - It gives you additional argument passed to the function in **kwargs called 'triton_context'. - You can read model config from it and in the future possibly have some interaction with triton. - """ - kwargs[TRITON_CONTEXT_FIELD_NAME] = get_triton_context(wrapped, instance) - return wrapped(*args, **kwargs) - - -@wrapt.decorator -def pad_batch(wrapped, instance, args, kwargs): - """Add padding to the inputs batches. 
- - Decorator appends last rows to the inputs multiple times to get desired batch size (preferred batch size or - max batch size from model config whatever is closer to current input size). - """ - inputs = {k: v for k, v in kwargs.items() if k != "__triton_context__"} - first_input = next(iter(inputs.values())) - config = get_model_config(wrapped, instance) - batch_sizes = ( - [] - if (config.batcher is None or config.batcher.preferred_batch_size is None) - else sorted(config.batcher.preferred_batch_size) - ) - batch_sizes.append(config.max_batch_size) - batch_size = batch_sizes[bisect_left(batch_sizes, first_input.shape[0])] - - new_inputs = { - input_name: np.repeat( - input_array, - np.concatenate([ - np.ones(input_array.shape[0] - 1), - np.array([batch_size - input_array.shape[0] + 1]), - ]).astype(np.int64), - axis=0, - ) - for input_name, input_array in inputs.items() - } - - kwargs.update(new_inputs) - return wrapped(*args, **kwargs) - - -_SPECIAL_KEYS = ["__triton_context__"] - - -def first_value(*keys: str, squeeze_single_values=True, strict: bool = True): - """This decorator overwrites selected inputs with first element of the given input. - - It can be used in two ways: - - 1. Wrapping a single request inference callable by chaining with @batch decorator: - ```python - @batch - @first_value("temperature") - def infer_fn(**inputs): - ... - return result - ``` - - 2. Wrapping a multiple requests inference callable: - ```python - @first_value("temperature") - def infer_fn(requests): - ... - return results - ``` - - By default, the decorator squeezes single value arrays to scalars. - This behavior can be disabled by setting the `squeeze_single_values` flag to False. - - By default, the decorator checks the equality of the values on selected values. - This behavior can be disabled by setting the `strict` flag to False. - - Wrapper can only be used with models that support batching. - - Args: - keys: The input keys selected for conversion. - squeeze_single_values: squeeze single value ND array to scalar values. Defaults to True. - strict: enable checking if all values on single selected input of request are equal. Defaults to True. - - Raises: - PyTritonRuntimeError: if not all values on a single selected input of the request are equal - and the strict flag is set to True. Additionally, if the decorator is used with a model that doesn't support batching, - PyTritonBadParameterError: if any of the keys passed to the decorator are not allowed. - """ - if any(k in _SPECIAL_KEYS for k in keys): - not_allowed_keys = [key for key in keys if key in _SPECIAL_KEYS] - raise PyTritonBadParameterError( - f"The keys {', '.join(not_allowed_keys)} are not allowed as keys for @first_value wrapper. 
" - f"The set of not allowed keys are {', '.join(_SPECIAL_KEYS)}" - ) - - @wrapt.decorator - def wrapper(wrapped, instance, args, kwargs): - model_config = get_model_config(wrapped, instance) - if not model_config.batching: - raise PyTritonRuntimeError("The @first_value decorator can only be used with models that support batching.") - - def _replace_inputs_with_first_value(_request): - for input_name in keys: - if input_name not in _request: - continue - - values = _request[input_name] - if strict: - # do not set axis for arrays with strings (object) or models not supporting batching - axis_of_uniqueness = None if values.dtype == object else 0 - unique_values = np.unique(values, axis=axis_of_uniqueness) - if len(unique_values) > 1: - raise PyTritonRuntimeError( - f"The values on the {input_name!r} input are not equal. " - "To proceed, either disable strict mode in @first_value wrapper " - "or ensure that the values always are consistent. " - f"The current values of {input_name!r} are {_request[input_name]!r}." - ) - - _first_value = values[0] - if ( - squeeze_single_values - and not np.isscalar(_first_value) - and all(dim == 1 for dim in _first_value.shape) - ): - _dim_0_array = np.squeeze(_first_value) - _first_value = _dim_0_array[()] # obtain scalar from 0-dim array with numpy type - - _request[input_name] = _first_value - return _request - - inputs_names = set(kwargs) - set(_SPECIAL_KEYS) - if inputs_names: - kwargs = _replace_inputs_with_first_value(kwargs) - return wrapped(*args, **kwargs) - else: - requests, *other_args = args - requests = [_replace_inputs_with_first_value(request) for request in requests] - return wrapped(requests, *other_args, **kwargs) - - return wrapper diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/exceptions.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/exceptions.py deleted file mode 100644 index 7bcaff50ac46a10449bf70b27bb69d909279197f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/exceptions.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""PyTriton exceptions definition.""" - - -class PyTritonError(Exception): - """Generic PyTriton exception.""" - - def __init__(self, message: str): - """Initialize exception with message. - - Args: - message: Error message - """ - self._message = message - - def __str__(self) -> str: - """Return exception as a string. - - Returns: - Message content - """ - return self._message - - @property - def message(self): - """Get the exception message. - - Returns: - The message associated with this exception, or None if no message. 
- - """ - return self._message - - -class PyTritonValidationError(PyTritonError): - """PyTriton configuration validation exception.""" - - pass - - -class PyTritonInvalidOperationError(PyTritonError): - """PyTriton invalid operation exception.""" - - pass - - -class PyTritonBadParameterError(PyTritonError): - """PyTriton invalid parameter exception.""" - - pass - - -class PyTritonModelConfigError(PyTritonError): - """PyTriton invalid model config exception.""" - - pass - - -class PyTritonUnrecoverableError(PyTritonError): - """Unrecoverable error occurred in inference callable, thus no further inferences possible.""" - - pass - - -class PyTritonRuntimeError(PyTritonError): - """Raised when an error is detected that doesn’t fall in any of the other categories.""" - - pass diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/__init__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/__init__.py deleted file mode 100644 index 9698bf59ee712a76ff439a991a2089f2c4edeac8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 -from .common import DeviceKind, DynamicBatcher, QueuePolicy, TimeoutAction # noqa: F401 -from .model_config import ModelConfig # noqa: F401 -from .tensor import Tensor # noqa: F401 diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/common.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/common.py deleted file mode 100644 index 1d58024be30bda87500b5326cbb9de66d5b073bd..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/common.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Common structures for internal and external ModelConfig.""" - -import dataclasses -import enum -from typing import Dict, Optional - - -class DeviceKind(enum.Enum): - """Device kind for model deployment. - - Args: - KIND_AUTO: Automatically select the device for model deployment. - KIND_CPU: Model is deployed on CPU. - KIND_GPU: Model is deployed on GPU. - """ - - KIND_AUTO = "KIND_AUTO" - KIND_CPU = "KIND_CPU" - KIND_GPU = "KIND_GPU" - - -class TimeoutAction(enum.Enum): - """Timeout action definition for timeout_action QueuePolicy field. 
- - Args: - REJECT: Reject the request and return error message accordingly. - DELAY: Delay the request until all other requests at the same (or higher) priority levels - that have not reached their timeouts are processed. - """ - - REJECT = "REJECT" - DELAY = "DELAY" - - -@dataclasses.dataclass -class QueuePolicy: - """Model queue policy configuration. - - More in Triton Inference Server [documentation] - [documentation]: https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto#L1037 - - Args: - timeout_action: The action applied to timed-out request. - default_timeout_microseconds: The default timeout for every request, in microseconds. - allow_timeout_override: Whether individual request can override the default timeout value. - max_queue_size: The maximum queue size for holding requests. - """ - - timeout_action: TimeoutAction = TimeoutAction.REJECT - default_timeout_microseconds: int = 0 - allow_timeout_override: bool = False - max_queue_size: int = 0 - - -@dataclasses.dataclass -class DynamicBatcher: - """Dynamic batcher configuration. - - More in Triton Inference Server [documentation] - [documentation]: https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto#L1104 - - Args: - max_queue_delay_microseconds: The maximum time, in microseconds, a request will be delayed in - the scheduling queue to wait for additional requests for batching. - preferred_batch_size: Preferred batch sizes for dynamic batching. - preserve_ordering : Should the dynamic batcher preserve the ordering of responses to - match the order of requests received by the scheduler. - priority_levels: The number of priority levels to be enabled for the model. - default_priority_level: The priority level used for requests that don't specify their priority. - default_queue_policy: The default queue policy used for requests. - priority_queue_policy: Specify the queue policy for the priority level. - """ - - max_queue_delay_microseconds: int = 0 - preferred_batch_size: Optional[list] = None - preserve_ordering: bool = False - priority_levels: int = 0 - default_priority_level: int = 0 - default_queue_policy: Optional[QueuePolicy] = None - priority_queue_policy: Optional[Dict[int, QueuePolicy]] = None diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/generator.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/generator.py deleted file mode 100644 index c425466027fd2340bcdae081cf80ddd9d486d4a5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/generator.py +++ /dev/null @@ -1,284 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Generator class for creating Triton model config. - -The class consume the TritonModelConfig object as a constructor argument and produce the Triton model config in form of -dict or file. 
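Before the generator itself, a brief sketch of how the DynamicBatcher and QueuePolicy dataclasses defined above might be combined; the concrete numbers are purely illustrative:

from pytriton.model_config import DynamicBatcher, QueuePolicy, TimeoutAction

batcher = DynamicBatcher(
    max_queue_delay_microseconds=100,  # wait up to 100 us to form a batch
    preferred_batch_size=[4, 8],
    default_queue_policy=QueuePolicy(
        timeout_action=TimeoutAction.DELAY,
        default_timeout_microseconds=1_000_000,
        allow_timeout_override=True,
        max_queue_size=64,
    ),
)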
- - Typical usage example: - - model_config = TritonModelConfig(model_name="simple") - generator = ModelConfigGenerator(model_config) - generator.to_file("/path/to/config.pbtxt") -""" - -import json -import logging -import pathlib -from typing import Dict, Union - -import numpy as np -from google.protobuf import json_format, text_format # pytype: disable=pyi-error - -from pytriton.exceptions import PyTritonBadParameterError - -from .triton_model_config import DynamicBatcher, TensorSpec, TritonModelConfig - -try: - import tritonclient.grpc as grpc_client - from tritonclient import utils as client_utils # noqa: F401 -except ImportError: - try: - import tritonclientutils as client_utils # noqa: F401 - import tritongrpcclient as grpc_client - except ImportError: - client_utils = None - grpc_client = None - -LOGGER = logging.getLogger(__name__) - - -class ModelConfigGenerator: - """Generate the protobuf config from ModelConfig object.""" - - def __init__(self, config: TritonModelConfig): - """Initialize generator. - - Args: - config: model config object - """ - self._config = config - - def to_file(self, config_path: Union[str, pathlib.Path]) -> str: - """Serialize ModelConfig to prototxt and save to config_path directory. - - Args: - config_path: path to configuration file - - Returns: - A string with generated model configuration - """ - from tritonclient.grpc import model_config_pb2 # pytype: disable=import-error - - # https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto - model_config = self.get_config() - LOGGER.debug(f"Generated Triton config:\n{json.dumps(model_config, indent=4)}") - - config_payload = json_format.ParseDict(model_config, model_config_pb2.ModelConfig()) - LOGGER.debug(f"Generated Triton config payload:\n{config_payload}") - - config_path = pathlib.Path(config_path) - config_path.parent.mkdir(parents=True, exist_ok=True) - - model_config_bytes = text_format.MessageToBytes(config_payload) - - # WAR: triton requires max_batch_size = 0 to be explicit written - # while this is not stored in payload during MessageToBytes - if model_config["max_batch_size"] == 0: - model_config_bytes += b"max_batch_size: 0\n" - - with config_path.open("wb") as cfg: - cfg.write(model_config_bytes) - - LOGGER.debug(f"Generated config stored in {config_path}") - - return config_payload - - def get_config(self) -> Dict: - """Create a Triton model config from ModelConfig object. - - Returns: - Dict with model configuration data - """ - model_config = {"name": self._config.model_name, "backend": self._config.backend} - self._set_batching(model_config) - self._set_model_signature(model_config) - self._set_instance_group(model_config) - self._set_model_transaction_policy(model_config) - self._set_backend_parameters(model_config) - self._set_response_cache(model_config) - return model_config - - def _set_batching(self, model_config: Dict) -> None: - """Configure batching for model deployment on Triton Inference Server. - - Args: - model_config: Dict with model config for Triton Inference Server - """ - if not self._config.batching: - model_config["max_batch_size"] = 0 - LOGGER.debug("Batching for model is disabled. 
The `max_batch_size` field value set to 0.") - return - elif self._config.max_batch_size < 1: - raise PyTritonBadParameterError("The `max_batch_size` must be greater or equal to 1.") - - model_config["max_batch_size"] = self._config.max_batch_size - if isinstance(self._config.batcher, DynamicBatcher): - dynamic_batching_config = {} - if self._config.batcher.max_queue_delay_microseconds > 0: - dynamic_batching_config["maxQueueDelayMicroseconds"] = int( - self._config.batcher.max_queue_delay_microseconds - ) - - if self._config.batcher.preferred_batch_size: - dynamic_batching_config["preferredBatchSize"] = [ - int(bs) for bs in self._config.batcher.preferred_batch_size - ] - - if self._config.batcher.preserve_ordering: - dynamic_batching_config["preserveOrdering"] = self._config.batcher.preserve_ordering - - if self._config.batcher.priority_levels: - dynamic_batching_config["priorityLevels"] = self._config.batcher.priority_levels - - if self._config.batcher.default_priority_level: - if self._config.batcher.default_priority_level > self._config.batcher.priority_levels: - raise PyTritonBadParameterError( - "The `default_priority_level` must be between 1 and " f"{self._config.batcher.priority_levels}." - ) - dynamic_batching_config["defaultPriorityLevel"] = self._config.batcher.default_priority_level - - if self._config.batcher.default_queue_policy: - priority_queue_policy_config = { - "timeoutAction": self._config.batcher.default_queue_policy.timeout_action.value, - "defaultTimeoutMicroseconds": int( - self._config.batcher.default_queue_policy.default_timeout_microseconds - ), - "allowTimeoutOverride": self._config.batcher.default_queue_policy.allow_timeout_override, - "maxQueueSize": int(self._config.batcher.default_queue_policy.max_queue_size), - } - dynamic_batching_config["defaultQueuePolicy"] = priority_queue_policy_config - - if self._config.batcher.priority_queue_policy: - if not self._config.batcher.priority_levels: - raise PyTritonBadParameterError( - "Provide the `priority_levels` if you want to define `priority_queue_policy` " - "for Dynamic Batching." - ) - - priority_queue_policy_config = {} - for priority, queue_policy in self._config.batcher.priority_queue_policy.items(): - if priority < 0 or priority > self._config.batcher.priority_levels: - raise PyTritonBadParameterError( - f"Invalid `priority`={priority} provided. The value must be between " - f"1 and {self._config.batcher.priority_levels}." - ) - - priority_queue_policy_config[priority] = { - "timeoutAction": queue_policy.timeout_action.value, - "defaultTimeoutMicroseconds": int(queue_policy.default_timeout_microseconds), - "allowTimeoutOverride": queue_policy.allow_timeout_override, - "maxQueueSize": int(queue_policy.max_queue_size), - } - - dynamic_batching_config["priorityQueuePolicy"] = priority_queue_policy_config - - model_config["dynamic_batching"] = dynamic_batching_config - else: - LOGGER.debug("Default batching used") - - def _set_instance_group(self, model_config: Dict) -> None: - """Configure instance group for model deployment on Triton Inference Server. 
- - Args: - model_config: Dict with model config for Triton Inference Server - """ - instance_groups = [] - for device_kind, count in self._config.instance_group.items(): - instance_groups.append({ - "count": count, - "kind": device_kind.value, - }) - - if instance_groups: - model_config["instance_group"] = instance_groups - - def _set_model_transaction_policy(self, model_config: Dict) -> None: - """Configure model transaction policy for model deployment on Triton Inference Server. - - Args: - model_config: Dict with model config for Triton Inference Server - """ - if self._config.decoupled: - model_config["model_transaction_policy"] = {"decoupled": True} - - def _set_backend_parameters(self, model_config: Dict) -> None: - """Configure backend parameters for model deployment on Triton Inference Server. - - Args: - model_config: Dict with model config for Triton Inference Server - """ - parameters = {} - for key, value in self._config.backend_parameters.items(): - parameters[key] = { - "string_value": str(value), - } - - if parameters: - model_config["parameters"] = parameters - - def _set_model_signature(self, model_config: Dict) -> None: - """Configure model signature for model deployment on Triton Inference Server. - - Args: - model_config: Dict with model config for Triton Inference Server - - """ - - def _rewrite_io_spec(spec_: TensorSpec) -> Dict: - if spec_.dtype in [np.object_, object, bytes, np.bytes_]: - dtype = "TYPE_STRING" - else: - # pytype: disable=attribute-error - dtype = spec_.dtype().dtype - # pytype: enable=attribute-error - dtype = f"TYPE_{client_utils.np_to_triton_dtype(dtype)}" - - dims = spec_.shape - - item = { - "name": spec_.name, - "dims": list(dims), - "data_type": dtype, - } - - if spec_.optional: - item["optional"] = True - - return item - - if self._config.inputs: - model_config["input"] = [_rewrite_io_spec(spec) for spec in self._config.inputs] - - if self._config.outputs: - outputs = [_rewrite_io_spec(spec) for spec in self._config.outputs] - if outputs: - optional_outputs = [o for o in outputs if o.get("optional")] - if optional_outputs: - raise PyTritonBadParameterError( - "Optional flag for outputs is not supported. " - f"Outputs marked as optional: {', '.join([o['name'] for o in optional_outputs])}." - ) - model_config["output"] = outputs - - def _set_response_cache(self, model_config: Dict): - """Configure response cache for model. - - Args: - model_config: Dictionary where configuration is attached. - """ - if self._config.response_cache: - model_config["response_cache"] = { - "enable": self._config.response_cache.enable, - } diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/model_config.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/model_config.py deleted file mode 100644 index 6b4f28a63583774bfd0983eba973091d98d0fa5c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/model_config.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""Model configurations. - -Dataclasses with specialized deployment paths for models on Triton. The purpose of this module is to provide clear options -to configure models of given types. - -The dataclasses are exposed in the user API. -""" - -import dataclasses - -from pytriton.model_config import DynamicBatcher - - -@dataclasses.dataclass -class ModelConfig: - """Additional model configuration for running model through Triton Inference Server. - - Args: - batching: Flag to enable/disable batching for model. - max_batch_size: The maximal batch size that would be handled by model. - batcher: Configuration of Dynamic Batching for the model. - response_cache: Flag to enable/disable response cache for the model - decoupled: Flag to enable/disable decoupled from requests execution - """ - - batching: bool = True - max_batch_size: int = 4 - batcher: DynamicBatcher = dataclasses.field(default_factory=DynamicBatcher) - response_cache: bool = False - decoupled: bool = False diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/parser.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/parser.py deleted file mode 100644 index f51ebae4c5692af2f95eabe5db07a50c0a2897c2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/parser.py +++ /dev/null @@ -1,258 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""ModelConfigParser class definition. - -Provide functionality to parse the Triton model configuration stored in file or form of dictionary into the object of -class ModelConfig. - - Examples of use: - - # Parse from dict - model_config = ModelConfigParser.from_dict(model_config_dict) - - # Parse from file - model_config = ModelConfigParser.from_file("/path/to/config.pbtxt") - -""" - -import json -import logging -import pathlib -from typing import Dict - -import numpy as np -from google.protobuf import json_format, text_format # pytype: disable=pyi-error - -from pytriton.exceptions import PyTritonModelConfigError - -from .common import QueuePolicy, TimeoutAction -from .triton_model_config import DeviceKind, DynamicBatcher, ResponseCache, TensorSpec, TritonModelConfig - -try: - import tritonclient.grpc as grpc_client - from tritonclient import utils as client_utils # noqa: F401 -except ImportError: - try: - import tritonclientutils as client_utils # noqa: F401 - import tritongrpcclient as grpc_client - except ImportError: - client_utils = None - grpc_client = None - -LOGGER = logging.getLogger(__name__) - - -class ModelConfigParser: - """Provide functionality to parse dictionary or file to ModelConfig object.""" - - @classmethod - def from_dict(cls, model_config_dict: Dict) -> TritonModelConfig: - """Create ModelConfig from configuration stored in dictionary. 
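For context, a hedged sketch of how the user-facing ModelConfig dataclass shown above is typically instantiated before its settings are generated into or parsed from a Triton config; the values are illustrative only:

from pytriton.model_config import DynamicBatcher, ModelConfig

config = ModelConfig(
    batching=True,
    max_batch_size=16,
    batcher=DynamicBatcher(max_queue_delay_microseconds=100),
    response_cache=False,
    decoupled=False,
)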
- - Args: - model_config_dict: Dictionary with model config - - Returns: - A ModelConfig object with data parsed from the dictionary - """ - LOGGER.debug(f"Parsing Triton config model from dict: \n{json.dumps(model_config_dict, indent=4)}") - - if model_config_dict.get("max_batch_size", 0) > 0: - batching = True - else: - batching = False - - dynamic_batcher_config = model_config_dict.get("dynamic_batching") - if dynamic_batcher_config is not None: - batcher = cls._parse_dynamic_batching(dynamic_batcher_config) - else: - batcher = None - - instance_group = { - DeviceKind(entry["kind"]): entry.get("count") for entry in model_config_dict.get("instance_group", []) - } - - decoupled = model_config_dict.get("model_transaction_policy", {}).get("decoupled", False) - - backend_parameters_config = model_config_dict.get("parameters", []) - if isinstance(backend_parameters_config, list): - # If the backend_parameters_config is a list of strings, use them as keys with empty values - LOGGER.debug(f"backend_parameters_config is a list of strings: {backend_parameters_config}") - backend_parameters = {name: "" for name in backend_parameters_config} - elif isinstance(backend_parameters_config, dict): - # If the backend_parameters_config is a dictionary, use the key and "string_value" fields as key-value pairs - LOGGER.debug(f"backend_parameters_config is a dictionary: {backend_parameters_config}") - backend_parameters = { - name: backend_parameters_config[name]["string_value"] for name in backend_parameters_config - } - else: - # Otherwise, raise an error - LOGGER.error( - f"Invalid type {type(backend_parameters_config)} for backend_parameters_config: {backend_parameters_config}" - ) - raise TypeError(f"Invalid type for backend_parameters_config: {type(backend_parameters_config)}") - - inputs = [ - cls.rewrite_io_spec(item, "input", idx) for idx, item in enumerate(model_config_dict.get("input", [])) - ] or None - outputs = [ - cls.rewrite_io_spec(item, "output", idx) for idx, item in enumerate(model_config_dict.get("output", [])) - ] or None - - response_cache_config = model_config_dict.get("response_cache") - if response_cache_config: - response_cache = cls._parse_response_cache(response_cache_config) - else: - response_cache = None - - return TritonModelConfig( - model_name=model_config_dict["name"], - batching=batching, - max_batch_size=model_config_dict.get("max_batch_size", 0), - batcher=batcher, - inputs=inputs, - outputs=outputs, - instance_group=instance_group, - decoupled=decoupled, - backend_parameters=backend_parameters, - response_cache=response_cache, - ) - - @classmethod - def from_file(cls, *, config_path: pathlib.Path) -> TritonModelConfig: - """Create ModelConfig from configuration stored in file. - - Args: - config_path: location of file with model config - - Returns: - A ModelConfig object with data parsed from the file - """ - from tritonclient.grpc import model_config_pb2 # pytype: disable=import-error - - LOGGER.debug(f"Parsing Triton config model config_path={config_path}") - - with config_path.open("r") as config_file: - payload = config_file.read() - model_config_proto = text_format.Parse(payload, model_config_pb2.ModelConfig()) - - model_config_dict = json_format.MessageToDict(model_config_proto, preserving_proto_field_name=True) - return ModelConfigParser.from_dict(model_config_dict=model_config_dict) - - @classmethod - def rewrite_io_spec(cls, item: Dict, io_type: str, idx: int) -> TensorSpec: - """Rewrite the IO Spec provided in form of dictionary to TensorSpec. 
- - Args: - item: IO data for input - io_type: Type of the IO (input or output) - idx: Index of IO - - Returns: - TensorSpec with input or output data - """ - name = item.get("name") - if not name: - raise PyTritonModelConfigError(f"Name for {io_type} at index {idx} not provided.") - - data_type = item.get("data_type") - if not data_type: - raise PyTritonModelConfigError(f"Data type for {io_type} with name `{name}` not defined.") - - data_type_val = data_type.split("_") - if len(data_type_val) != 2: - raise PyTritonModelConfigError( - f"Invalid data type `{data_type}` for {io_type} with name `{name}` not defined. " - "The expected name is TYPE_{type}." - ) - - data_type = data_type_val[1] - if data_type == "STRING": - dtype = np.bytes_ - else: - dtype = client_utils.triton_to_np_dtype(data_type) - if dtype is None: - raise PyTritonModelConfigError(f"Unsupported data type `{data_type}` for {io_type} with name `{name}`") - - dtype = np.dtype("bool") if dtype is bool else dtype - - dims = item.get("dims", []) - if not dims: - raise PyTritonModelConfigError(f"Dimension for {io_type} with name `{name}` not defined.") - - shape = tuple(int(s) for s in dims) - - optional = item.get("optional", False) - return TensorSpec(name=item["name"], shape=shape, dtype=dtype, optional=optional) - - @classmethod - def _parse_dynamic_batching(cls, dynamic_batching_config: Dict) -> DynamicBatcher: - """Parse config to create DynamicBatcher object. - - Args: - dynamic_batching_config: Configuration of dynamic batcher from config - - Returns: - DynamicBatcher object with configuration - """ - default_queue_policy = None - default_queue_policy_config = dynamic_batching_config.get("default_queue_policy") - if default_queue_policy_config: - default_queue_policy = QueuePolicy( - timeout_action=TimeoutAction( - default_queue_policy_config.get("timeout_action", TimeoutAction.REJECT.value) - ), - default_timeout_microseconds=int(default_queue_policy_config.get("default_timeout_microseconds", 0)), - allow_timeout_override=bool(default_queue_policy_config.get("allow_timeout_override", False)), - max_queue_size=int(default_queue_policy_config.get("max_queue_size", 0)), - ) - - priority_queue_policy = None - priority_queue_policy_config = dynamic_batching_config.get("priority_queue_policy") - if priority_queue_policy_config: - priority_queue_policy = {} - for priority, queue_policy_config in priority_queue_policy_config.items(): - queue_policy = QueuePolicy( - timeout_action=TimeoutAction(queue_policy_config.get("timeout_action", TimeoutAction.REJECT.value)), - default_timeout_microseconds=int(queue_policy_config.get("default_timeout_microseconds", 0)), - allow_timeout_override=bool(queue_policy_config.get("allow_timeout_override", False)), - max_queue_size=int(queue_policy_config.get("max_queue_size", 0)), - ) - priority_queue_policy[int(priority)] = queue_policy - - batcher = DynamicBatcher( - preferred_batch_size=dynamic_batching_config.get("preferred_batch_size"), - max_queue_delay_microseconds=int(dynamic_batching_config.get("max_queue_delay_microseconds", 0)), - preserve_ordering=bool(dynamic_batching_config.get("preserve_ordering", False)), - priority_levels=int(dynamic_batching_config.get("priority_levels", 0)), - default_priority_level=int(dynamic_batching_config.get("default_priority_level", 0)), - default_queue_policy=default_queue_policy, - priority_queue_policy=priority_queue_policy, - ) - return batcher - - @classmethod - def _parse_response_cache(cls, response_cache_config: Dict) -> ResponseCache: - """Parse 
config for response cache. - - Args: - response_cache_config: response cache configuration - - Returns: - ResponseCache object with configuration - """ - response_cache = ResponseCache( - enable=bool(response_cache_config["enable"]), - ) - return response_cache diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/tensor.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/tensor.py deleted file mode 100644 index ded9050c6f0ebe939de8698dadc84c79e0ae903c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/tensor.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tensor object definition. - -Describe the model input or output. - - Examples of use: - - # Minimal constructors - tensor = Tensor(dtype=np.bytes_, shape=(-1,)) - tensor = Tensor(dtype=np.float32, shape=(-1,)) - - # Type definition from existing object - a = np.array([1, 2, 3, 4]) - tensor = Tensor(dtype=a.dtype, shape=(-1,)) - - # Custom name - tensor = Tensor(name="data", dtype=np.float32, shape=(16,)) -""" - -import dataclasses -from typing import Optional, Type, Union - -import numpy as np - - -@dataclasses.dataclass(frozen=True) -class Tensor: - """Model input and output definition for Triton deployment. - - Args: - shape: Shape of the input/output tensor. - dtype: Data type of the input/output tensor. - name: Name of the input/output of model. - optional: Flag to mark if input is optional. - """ - - shape: tuple - dtype: Union[np.dtype, Type[np.dtype], Type[object]] - name: Optional[str] = None - optional: Optional[bool] = False - - def __post_init__(self): - """Override object values on post init or field override.""" - if isinstance(self.dtype, np.dtype): - object.__setattr__(self, "dtype", self.dtype.type) # pytype: disable=attribute-error diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/triton_model_config.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/triton_model_config.py deleted file mode 100644 index 87aa276c30840c1639d0b6b1b0183edcd1016889..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/model_config/triton_model_config.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""ModelConfig related objects.""" - -import dataclasses -from typing import Dict, Optional, Sequence, Type, Union - -import numpy as np - -from .common import DeviceKind, DynamicBatcher - - -@dataclasses.dataclass -class ResponseCache: - """Model response cache configuration. - - More in Triton Inference Server [documentation] - [documentation]: https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto#L1765 - """ - - enable: bool - - -@dataclasses.dataclass -class TensorSpec: - """Stores specification of single tensor. This includes name, shape and dtype.""" - - name: str - shape: tuple - dtype: Union[Type[np.dtype], Type[object]] - optional: Optional[bool] = False - - -@dataclasses.dataclass -class TritonModelConfig: - """Triton Model Config dataclass for simplification and specialization of protobuf config generation. - - More in Triton Inference Server [documentation] - [documentation]: https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto - """ - - model_name: str - model_version: int = 1 - max_batch_size: int = 4 - batching: bool = True - batcher: Optional[DynamicBatcher] = None - instance_group: Dict[DeviceKind, Optional[int]] = dataclasses.field(default_factory=lambda: {}) - decoupled: bool = False - backend_parameters: Dict[str, str] = dataclasses.field(default_factory=lambda: {}) - inputs: Optional[Sequence[TensorSpec]] = None - outputs: Optional[Sequence[TensorSpec]] = None - response_cache: Optional[ResponseCache] = None - - @property - def backend(self) -> str: - """Return backend parameter.""" - return "python" diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/__init__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/__init__.py deleted file mode 100644 index 8010bd32129eb99ce3ce66981b81d3ba41bf287b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/manager.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/manager.py deleted file mode 100644 index 693f8013d4cc6cc91e0c7a3e735bfd11f2bac400..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/manager.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""ModelManager class. - -The ModelManager is responsible for maintaining the models that has to be server on Triton Inference Server. - - Examples of use: - manager = ModelManager(model_repository) - manager.add_model(model) - - manager.create_models() -""" - -import contextlib -import json -import logging -import pathlib -import socket -from typing import Dict, Iterable, Optional, Tuple - -from tritonclient.grpc import InferenceServerException - -from pytriton.client import ModelClient -from pytriton.client.utils import create_client_from_url, wait_for_server_ready -from pytriton.constants import CREATE_TRITON_CLIENT_TIMEOUT_S, DEFAULT_TRITON_STARTUP_TIMEOUT_S -from pytriton.exceptions import PyTritonInvalidOperationError -from pytriton.models.model import Model - -LOGGER = logging.getLogger(__name__) - - -class ModelManager: - """ModelManager class for maintaining Triton models.""" - - def __init__( - self, - triton_url: str, - model_store_path: Optional[pathlib.Path] = None, - ): - """Create ModelManager object. - - Args: - triton_url: Triton server URL - model_store_path: Path to local model store - """ - self._triton_url = triton_url - self._models: Dict[Tuple[str, int], Model] = {} - self._model_store_path = model_store_path - - @property - def models(self) -> Iterable[Model]: - """List models added to manage. - - Returns: - List with models added to ModelManager. - """ - return self._models.values() - - def add_model(self, model: Model, load_model: bool = False) -> None: - """Add model to manage. - - Args: - model: Model instance - load_model: If True, model will be loaded to Triton server. - """ - key = self._format_key(model) - if key in self._models: - raise PyTritonInvalidOperationError("Cannot add model with the same name twice.") - - LOGGER.debug(f"Adding {model.model_name} ({model.model_version}) to registry under {key}.") - self._models[key] = model - - _is_model_store_local = self._model_store_path is not None - if _is_model_store_local: - model.generate_model(self._model_store_path) - - if load_model: - self._load_model(model, _is_model_store_local) - model.setup() - - def load_models(self) -> None: - """Load bound models to Triton server and setup loaded models.""" - for model in self._models.values(): - if not model.is_alive(): - self._load_model(model) - model.setup() - - def setup_models(self) -> None: - """Setup loaded models.""" - for model in self._models.values(): - if not model.is_alive(): - model.setup() - - def clean(self) -> None: - """Clean the model and internal registry.""" - with contextlib.closing( - create_client_from_url(self._triton_url, network_timeout_s=CREATE_TRITON_CLIENT_TIMEOUT_S) - ) as client: - server_live = False - try: - server_live = client.is_server_live() - # TimeoutError and ConnectionRefusedError are derived from OSError so they are redundant here - # OSError is raised from gevent/_socketcommon.py:590 sometimes, when server is not ready - except (socket.timeout, OSError, InferenceServerException): - pass - except Exception as ex: - LOGGER.error(f"Unexpected exception during server live check: {ex}") - raise ex - - for name, model in self._models.items(): - LOGGER.debug(f"Clean model {name}.") - model.clean() - if server_live: - client.unload_model(model.model_name) - - if server_live: - # after unload there is a short period of time when server is not ready - wait_for_server_ready(client, timeout_s=DEFAULT_TRITON_STARTUP_TIMEOUT_S) 
- - self._models.clear() - - def _format_key(self, model: Model) -> Tuple[str, int]: - key = (model.model_name.lower(), model.model_version) - return key - - def _load_model(self, model: Model, local_model_store=False): - """Prepare model config and required files dict and load model to Triton server.""" - LOGGER.debug(f"Creating model {model.model_name} with version {model.model_version}.") - config = None if local_model_store else json.dumps(model.get_model_config()) - files = None if local_model_store else model.get_proxy_model_files() - with ModelClient( - url=self._triton_url, model_name=model.model_name, model_version=str(model.model_version) - ) as client: - client.wait_for_server(timeout_s=DEFAULT_TRITON_STARTUP_TIMEOUT_S) - client.load_model(config=config, files=files) - LOGGER.debug("Done.") diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/model.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/model.py deleted file mode 100644 index 3c0890a5db831982435bce1a6da03d1d87356ff0..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/models/model.py +++ /dev/null @@ -1,335 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Model base class.""" - -import base64 -import copy -import enum -import json -import logging -import os -import pathlib -import shutil -import threading -import typing -from typing import Callable, List, Optional, Sequence, Union - -from pytriton.decorators import TritonContext -from pytriton.exceptions import PyTritonValidationError -from pytriton.model_config.generator import ModelConfigGenerator -from pytriton.model_config.model_config import ModelConfig -from pytriton.model_config.tensor import Tensor -from pytriton.model_config.triton_model_config import DeviceKind, ResponseCache, TensorSpec, TritonModelConfig -from pytriton.proxy.communication import get_config_from_handshake_server -from pytriton.proxy.data import Base64SerializerDeserializer, TensorStoreSerializerDeserializer -from pytriton.proxy.inference import InferenceHandler, InferenceHandlerEvent, RequestsResponsesConnector -from pytriton.proxy.validators import TritonResultsValidator -from pytriton.utils.workspace import Workspace - -LOGGER = logging.getLogger(__name__) - - -class ModelEvent(enum.Enum): - """Represents model event.""" - - RUNTIME_TERMINATING = "runtime-terminating" - RUNTIME_TERMINATED = "runtime-terminated" - - -ModelEventsHandler = typing.Callable[["Model", ModelEvent, typing.Optional[typing.Any]], None] - - -def _inject_triton_context(triton_context: TritonContext, model_callable: Callable) -> Callable: - """Inject triton context into callable. 
- - Args: - triton_context: Triton context - model_callable: Callable to inject triton context - - Returns: - Callable with injected triton context - """ - if hasattr(model_callable, "__self__"): - model_callable.__self__.__triton_context__ = triton_context - else: - model_callable.__triton_context__ = triton_context - return model_callable - - -class Model: - """Model definition.""" - - SCRIPT_FILES_TO_COPY = ["communication.py", "data.py", "model.py", "types.py", "telemetry.py"] - - def __init__( - self, - model_name: str, - model_version: int, - inference_fn: Union[Callable, Sequence[Callable]], - inputs: Sequence[Tensor], - outputs: Sequence[Tensor], - config: ModelConfig, - workspace: Workspace, - triton_context: TritonContext, - strict: bool, - trace_config: Optional[List[str]] = None, - ): - """Create Python model with required data. - - Args: - model_name: Model name - model_version: Model version - inference_fn: Inference handler (function or lambda) - inputs: Model inputs definition - outputs: Model outputs definition - config: model configuration parameters - workspace: workspace for storing artifacts - triton_context: Triton context - strict: Enable strict validation of model outputs - trace_config: List of trace config parameters - - Raises: - PyTritonValidationError if one or more of provided values are incorrect. - """ - self.triton_context = triton_context - self.model_name = model_name - self.model_version = model_version - self._inference_handlers_lock = threading.Lock() - self._inference_handlers = [] - self._requests_respones_connectors = [] - self._observers_lock = threading.Lock() - self._strict = strict - self._trace_config = trace_config - - self.infer_functions = [inference_fn] if isinstance(inference_fn, Callable) else inference_fn - if not isinstance(self.infer_functions, (Sequence, Callable)): - raise PyTritonValidationError("inference_fn has to be either callable or sequence of callables") - - self.inputs = inputs - self.outputs = outputs - - if any(output.optional for output in self.outputs): - raise PyTritonValidationError("Output tensors cannot be optional.") - - self.config = config - self._workspace = workspace - if os.environ.get("PYTRITON_NO_TENSORSTORE"): - self._serializer_deserializer = Base64SerializerDeserializer() - else: - self._serializer_deserializer = TensorStoreSerializerDeserializer() - self._triton_model_config: Optional[TritonModelConfig] = None - self._model_events_observers: typing.List[ModelEventsHandler] = [] - - def get_model_config(self) -> dict: - """Get model config. - - Returns: - Dictionary with model config - """ - triton_model_config = self._get_triton_model_config() - generator = ModelConfigGenerator(config=triton_model_config) - return generator.get_config() - - def get_proxy_model_files(self) -> typing.Dict[str, bytes]: - """Get proxy model files. 
- - Returns: - Dictionary with model files to be copied to Triton model store on server side: - key: file path in following format - 'file:{model_version}/{file_name}' - value: file content as bytes - """ - proxy_model_files_dict = {} - proxy_path = pathlib.Path(__file__).parent.parent / "proxy" - for file_to_copy in self.SCRIPT_FILES_TO_COPY: - src_file_path = proxy_path / file_to_copy - with open(src_file_path, "rb") as f: - src_file = f.read() - proxy_model_files_dict[f"file:{self.model_version}/{file_to_copy}"] = src_file - - return proxy_model_files_dict - - def generate_model(self, model_repository: pathlib.Path) -> None: - """Generate model and its config in the model repository. - - Args: - model_repository: Path to Triton model repository - - Raises: - OSError: when model repository not exists - """ - LOGGER.debug( - f"Generating model and config for {self.model_name} and {self.model_version} to {model_repository}" - ) - - model_catalog = model_repository / self.model_name - - config_file_path = model_catalog / "config.pbtxt" - if config_file_path.exists(): - LOGGER.warning(f"The config file {config_file_path} is going to be overridden.") - - triton_model_config = self._get_triton_model_config() - generator = ModelConfigGenerator(config=triton_model_config) - generator.to_file(config_file_path) - - model_version_catalog = model_catalog / str(self.model_version) - model_version_catalog.mkdir(exist_ok=True, parents=True) - - proxy_path = pathlib.Path(__file__).parent.parent / "proxy" - - for script_file in self.SCRIPT_FILES_TO_COPY: - src_file_path = proxy_path / script_file - dst_file_path = model_version_catalog / script_file - shutil.copy(src_file_path, dst_file_path) - - def setup(self) -> None: - """Create deployments and bindings to Triton Inference Server.""" - with self._inference_handlers_lock: - if not self._inference_handlers: - triton_model_config = self._get_triton_model_config() - workspace_path = pathlib.Path(triton_model_config.backend_parameters["workspace-path"]) - validator = TritonResultsValidator(triton_model_config, self._strict) - - inference_handler_config_path = workspace_path / f"{self.model_name}-config.sock" - inference_handler_config = get_config_from_handshake_server(inference_handler_config_path) - - data_socket = pathlib.Path(inference_handler_config["data_socket"]) - authkey = base64.decodebytes(inference_handler_config["authkey"].encode("ascii")) - self._serializer_deserializer.connect(data_socket.as_posix(), authkey) - - for i, infer_function in enumerate(self.infer_functions): - self.triton_context.model_configs[infer_function] = copy.deepcopy(triton_model_config) - _inject_triton_context(self.triton_context, infer_function) - - request_server_socket = workspace_path / f"{self.model_name}_0_{i}-server.sock" - request_server_socket = f"ipc://{request_server_socket.as_posix()}" - - requests_respones_connector = RequestsResponsesConnector( - url=request_server_socket, - serializer_deserializer=self._serializer_deserializer, - ) - requests_respones_connector.start() - self._requests_respones_connectors.append(requests_respones_connector) - inference_handler = InferenceHandler( - model_callable=infer_function, - requests_responses_connector=requests_respones_connector, - validator=validator, - name=f"inference_handler-{i}", - ) - inference_handler.on_inference_handler_event(self._on_inference_handler_event) - inference_handler.start() - self._inference_handlers.append(inference_handler) - - def clean(self) -> None: - """Post unload actions to perform 
on model.""" - with self._observers_lock: - LOGGER.debug("Clearing model events observers") - self._model_events_observers.clear() - LOGGER.debug("Socket closed. Waiting for inference handler and communication threads to shut down") - with self._inference_handlers_lock: - for inference_handler in self._inference_handlers: - inference_handler.stop() - for inference_handler in self._inference_handlers: - inference_handler.join() - self._inference_handlers.clear() - for requests_responses_connector in self._requests_respones_connectors: - requests_responses_connector.close() - for requests_responses_connector in self._requests_respones_connectors: - requests_responses_connector.join() - self._requests_respones_connectors.clear() - self._serializer_deserializer.close() - - def is_alive(self) -> bool: - """Validate if model is working on Triton. - - If model is fully loaded by Triton, return True. Otherwise, perform a custom verification. - - Returns: - True if model is working, False otherwise - """ - with self._inference_handlers_lock: - return ( - bool(self._inference_handlers) - and bool(self._requests_respones_connectors) - and all(inference_handler.is_alive() for inference_handler in self._inference_handlers) - and all( - requests_responses_connector.is_alive() - for requests_responses_connector in self._requests_respones_connectors - ) - ) - - def _get_triton_model_config(self) -> TritonModelConfig: - """Generate ModelConfig from descriptor and custom arguments for Python model. - - Returns: - ModelConfig object with configuration for Python model deployment - """ - if not self._triton_model_config: - backend_parameters = {"workspace-path": self._workspace.path.as_posix()} - if self._trace_config: - backend_parameters["trace-config"] = base64.b64encode(json.dumps(self._trace_config).encode()).decode() - triton_model_config = TritonModelConfig( - model_name=self.model_name, - model_version=self.model_version, - batching=self.config.batching, - batcher=self.config.batcher, - max_batch_size=self.config.max_batch_size, - decoupled=self.config.decoupled, - backend_parameters=backend_parameters, - instance_group={DeviceKind.KIND_CPU: len(self.infer_functions)}, - ) - inputs = [] - for idx, input_spec in enumerate(self.inputs, start=1): - input_name = input_spec.name if input_spec.name else f"INPUT_{idx}" - tensor = TensorSpec( - name=input_name, dtype=input_spec.dtype, shape=input_spec.shape, optional=input_spec.optional - ) - inputs.append(tensor) - - outputs = [] - for idx, output_spec in enumerate(self.outputs, start=1): - output_name = output_spec.name if output_spec.name else f"OUTPUT_{idx}" - tensor = TensorSpec(name=output_name, dtype=output_spec.dtype, shape=output_spec.shape) - outputs.append(tensor) - - triton_model_config.inputs = inputs - triton_model_config.outputs = outputs - - if self.config.response_cache: - triton_model_config.response_cache = ResponseCache(enable=True) - - self._triton_model_config = triton_model_config - - return self._triton_model_config - - def on_model_event(self, model_event_handle_fn: ModelEventsHandler): - """Register ModelEventsHandler callable. 
- - Args: - model_event_handle_fn: function to be called when model events arises - """ - with self._observers_lock: - self._model_events_observers.append(model_event_handle_fn) - - def _notify_model_events_observers(self, event: ModelEvent, context: typing.Any): - with self._observers_lock: - for model_event_handle_fn in self._model_events_observers: - model_event_handle_fn(self, event, context) - - def _on_inference_handler_event( - self, proxy_backend: InferenceHandler, event: InferenceHandlerEvent, context: typing.Optional[typing.Any] = None - ): - if event in [InferenceHandlerEvent.CLOSING, InferenceHandlerEvent.UNRECOVERABLE_ERROR]: - self._notify_model_events_observers(ModelEvent.RUNTIME_TERMINATING, context) - elif event == InferenceHandlerEvent.CLOSED: - self._notify_model_events_observers(ModelEvent.RUNTIME_TERMINATED, context) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/__init__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/__init__.py deleted file mode 100644 index 8010bd32129eb99ce3ce66981b81d3ba41bf287b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/communication.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/communication.py deleted file mode 100644 index 07b968f529af6131e2909c88e4cd474ce89e4802..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/communication.py +++ /dev/null @@ -1,555 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Module handling communication between RequestsServer and RequestsServerClients.""" - -import asyncio -import enum -import functools -import json -import logging -import pathlib -import socket -import threading -import time -import traceback -import typing -import uuid -from concurrent.futures import Future as ConcurrentFuture - -import zmq # pytype: disable=import-error -import zmq.asyncio # pytype: disable=import-error - -LOGGER = logging.getLogger(__name__) -SERVER_LOGGER = LOGGER.getChild("server") -CLIENT_LOGGER = LOGGER.getChild("client") - -_STARTUP_TIMEOUT_S = 1.0 - - -class PyTritonResponseFlags(enum.IntFlag): - """Response flags for PyTritonInferenceHandler.""" - - EOS = enum.auto() # End Of Stream - ERROR = enum.auto() - - -class _RequestsServerState(enum.Enum): - STOPPED = enum.auto() - STARTING = enum.auto() - STARTED = enum.auto() - STOPPING = enum.auto() - - -def _set_current_task_name(name: str): - current_task = asyncio.current_task() - if current_task is not None: - current_task.set_name(name) - - -_RequestScope = typing.Dict[str, typing.Any] -_HandleRequestsCoro = typing.Callable[[_RequestScope, bytes, zmq.asyncio.Socket], typing.Awaitable[typing.Any]] -HandleResponsesCoro = typing.Callable[[_RequestScope, asyncio.Queue, ConcurrentFuture], typing.Awaitable[typing.Any]] - - -class RequestsServer: - """Class for serving available inference requests and passing inference responses.""" - - def __init__(self, url: str, handle_responses_fn: HandleResponsesCoro): - """Initialize RequestsServer. - - Args: - url: url to bind socket - handle_responses_fn: couroutine handling responses from InferenceHandler - """ - self._url = url - self._handle_responses_fn = handle_responses_fn - self._state = _RequestsServerState.STOPPED - self._state_condition = threading.Condition() - self._shutdown_event = asyncio.Event() # TODO: is it still required having condition? - self._server_loop = None - - # requests_id -> results asyncio.Queue map - self._responses_queues: typing.Dict[bytes, asyncio.Queue] = {} - self._handle_responses_tasks: typing.Dict[bytes, asyncio.Task] = {} - - def run(self): - """Run RequestsServer. - - It stops when handle_messages coroutine finishes. - - Raises: - RuntimeError: if RequestsServer is already running - """ - with self._state_condition: - if self._state != _RequestsServerState.STOPPED: - raise RuntimeError(f"Cannot run {type(self).__name__} as it is already running") - - self._state = _RequestsServerState.STARTING - self._state_condition.notify_all() - - assert len(self._responses_queues) == 0 - assert len(self._handle_responses_tasks) == 0 - - asyncio.run(self.handle_messages()) - - @property - def server_loop(self) -> typing.Optional[asyncio.AbstractEventLoop]: - """Get asyncio loop for RequestsServer. - - Returns: - asyncio.AbstractEventLoop: asyncio loop for RequestsServer or None if server is not started yet - """ - return self._server_loop - - def wait_till_running(self): - """Wait till RequestsServer is running. 
- - Raises: - RuntimeError: if RequestsServer is shutting down or not launched yet - """ - with self._state_condition: - if self._state == _RequestsServerState.STARTING: - self._state_condition.wait_for( - lambda: self._state == _RequestsServerState.STARTED, timeout=_STARTUP_TIMEOUT_S - ) - elif self._state == _RequestsServerState.STOPPED: - raise RuntimeError("Cannot push requests before RequestsServer is started") - elif self._state == _RequestsServerState.STOPPING: - raise RuntimeError(f"Cannot push requests while {type(self).__name__} is shutting down") - - async def handle_messages(self): - """Coroutine for handling messages from InferenceHandler.""" - self._server_loop = asyncio.get_running_loop() - try: - SERVER_LOGGER.debug(f"Binding socket to url='{self._url}'") - self._zmq_context = zmq.asyncio.Context() - self._socket = self._zmq_context.socket(zmq.DEALER) - self._socket.bind(self._url) - except (TypeError, zmq.error.ZMQError) as e: - raise ValueError( - f"Error occurred during binding socket to url='{self._url}' (e: {e})." "RequestsServer will be closed." - ) from e - - _set_current_task_name("handle_messages") - - with self._state_condition: - if self._state != _RequestsServerState.STARTING: - self._state = _RequestsServerState.STOPPED - self._state_condition.notify_all() - raise RuntimeError(f"Cannot start {type(self).__name__} as it is not in STARTING state") - - self._state = _RequestsServerState.STARTED - self._state_condition.notify_all() - - def _all_responses_processed(): - return not any([self._handle_responses_tasks, self._responses_queues]) - - try: - flag_check_interval_s = 1.0 - # have to receive mssages untill all requestss to be processed, despite shutdown event is set - while not self._shutdown_event.is_set() or not _all_responses_processed(): - requests_id = b"" - try: - requests_id, flags, responses_payload = await asyncio.wait_for( - self._socket.recv_multipart(), flag_check_interval_s - ) - flags = int.from_bytes(flags, byteorder="big") - responses_queue = self._responses_queues[requests_id] - responses_queue.put_nowait((flags, responses_payload)) # queue have no max_size - except asyncio.TimeoutError: - continue - except KeyError: - SERVER_LOGGER.warning(f"Received response for unknown requests {requests_id.hex()}. Ignoring it.") - except asyncio.CancelledError: - SERVER_LOGGER.info("Received CancelledError") - self._shutdown_event.set() - finally: - # Received all responses, close socket - SERVER_LOGGER.debug("Closing socket") - try: - if self._socket is not None: - self._socket.close(linger=0) - self._socket = None - except zmq.error.ZMQError as e: - SERVER_LOGGER.error(f"Error occurred during closing socket (e: {e}).") - - try: - if self._zmq_context is not None: - self._zmq_context.term() - self._zmq_context = None - except zmq.error.ZMQError as e: - SERVER_LOGGER.error(f"Error occurred during closing zmq context (e: {e}).") - - self._server_loop = None - - with self._state_condition: - self._state = _RequestsServerState.STOPPED - self._state_condition.notify_all() - - SERVER_LOGGER.debug("Socket for handle_messages task closed") - self._shutdown_event.clear() - SERVER_LOGGER.debug(f"Leaving handle_messages task from {type(self).__name__}") - - def shutdown(self): - """Close RequestsServer. - - Don't wait for handle_messages coroutine to finish. 
- """ - SERVER_LOGGER.debug("Closing RequestsServer") - with self._state_condition: - self._state = _RequestsServerState.STOPPING - self._state_condition.notify_all() - self._shutdown_event.set() - - async def send_requests( - self, requests_id: bytes, requests_payload: bytes, responses_future: ConcurrentFuture - ) -> asyncio.Task: - """Send requests to InferenceHandler. - - Args: - requests_id: id of requests - requests_payload: payload of requests - responses_future: future for waiting in another thread - - Returns: - asyncio.Task: task handling responses from InferenceHandler - - Raises: - RuntimeError: if RequestsServer is shutting down or requests_id is already pending - """ - if self._shutdown_event.is_set(): - SERVER_LOGGER.debug(f"Cannot send requests while {type(self).__name__} is {self._state.name}") - raise RuntimeError(f"Cannot send requests while {type(self).__name__} is {self._state.name}") - - if requests_id in self._responses_queues or requests_id in self._handle_responses_tasks: - SERVER_LOGGER.debug(f"Cannot send requests with id {requests_id.hex()} as such id is already pending") - raise RuntimeError(f"Cannot send requests with id {requests_id.hex()} as such id is already pending") - - _set_current_task_name(f"send_requests-{requests_id.hex()}") - - self._responses_queues[requests_id] = asyncio.Queue() - scope = {"requests_id": requests_id} - handle_responses_task = self._server_loop.create_task( - self._handle_responses(scope, self._responses_queues[requests_id], responses_future), - name=f"handle_responses-{requests_id.hex()}", - ) - self._handle_responses_tasks[requests_id] = handle_responses_task - - # FIXME: check if can not copy buffers; in case copy=False send_multipart returns MessageTracker - # https://pyzmq.readthedocs.io/en/latest/api/zmq.html#zmq.Socket.send_multipart - # consider send_pyobject|send_serialized (but it is not multipart) - - # sending in same loop, thus thread as handle_messages - # send_multipart doesn't return anything, as it copies requests_payload - await self._socket.send_multipart([requests_id, requests_payload]) - - return handle_responses_task - - async def _handle_responses(self, scope, responses_queue: asyncio.Queue, responses_future: ConcurrentFuture): - """Handle responses from InferenceHandler. - - Args: - scope: scope for handling responses - responses_queue: queue with responses payload from InferenceHandler - responses_future: future for waiting in another thread - """ - requests_id = scope["requests_id"] - try: - return await self._handle_responses_fn(scope, responses_queue, responses_future) - finally: - self._responses_queues.pop(requests_id) - self._handle_responses_tasks.pop(requests_id) - - -class RequestsServerClient: - """RequestsServer client for handling requests from RequestsServer and sending back responses.""" - - def __init__(self, url: str, handle_requests_fn: _HandleRequestsCoro, name: typing.Optional[str] = None): - """Initialize RequestsServerClient. - - Args: - url: url to connect socket - handle_requests_fn: couroutine handling requests from InferenceHandler - name: name of RequestsServerClient - """ - self._shutdown_event = asyncio.Event() - self._url = url - self._handle_requests_fn = handle_requests_fn - self._handle_requests_tasks: typing.Dict[bytes, asyncio.Task] = {} - self._handle_requests_tasks_condition = asyncio.Condition() - self._name = name or f"requests_server_client-{uuid.uuid4().hex[-4:]}" - self._loop = None - - def run(self): - """Run RequestsServerClient. 
- - It stops when handle_requests coroutine finishes. - """ - asyncio.run(self.handle_requests()) - - def shutdown(self) -> None: - """Close RequestsServerClient. - - Don't wait for handle_requests coroutine to finish. - """ - CLIENT_LOGGER.debug(f"Closing {type(self).__name__} {self._name}") - self._shutdown_event.set() - - async def handle_requests(self): - """Coroutine for handling requests from RequestsServer.""" - name = self._name - _set_current_task_name(name) - - zmq_context = None - socket = None - self._loop = asyncio.get_running_loop() - try: - CLIENT_LOGGER.debug(f"Connecting {name} to server listening on {self._url}") - zmq_context = zmq.asyncio.Context() - socket = zmq_context.socket(zmq.DEALER) - socket.connect(self._url) - - send = functools.partial(self._send, socket) - - flag_check_interval_s = 1.0 - while True: - try: - requests_id, requests_payloads = await asyncio.wait_for( - socket.recv_multipart(), flag_check_interval_s - ) - scope = {"requests_id": requests_id} - CLIENT_LOGGER.debug(f"{requests_id.hex()} received requests") - handle_requests_task = self._loop.create_task(self._handle_requests(scope, requests_payloads, send)) - self._handle_requests_tasks[requests_id] = handle_requests_task - handle_requests_task.set_name(f"handle_requests-{requests_id.hex()}") - except asyncio.TimeoutError: - if self._shutdown_event.is_set(): - break - continue - - CLIENT_LOGGER.debug("Waiting for handle_requests tasks to finish") - async with self._handle_requests_tasks_condition: - await self._handle_requests_tasks_condition.wait_for(lambda: len(self._handle_requests_tasks) == 0) - CLIENT_LOGGER.debug("All handle_requests tasks finished") - - except zmq.error.ZMQError: - CLIENT_LOGGER.exception( - "Connection error occurred during reading requests. " f"{type(self).__name__} will be closed." - ) - self._shutdown_event.set() - except Exception: - CLIENT_LOGGER.exception(f"Internal {type(self).__name__}. " f"{type(self).__name__} will be closed.") - self._shutdown_event.set() - finally: - try: - socket_close_timeout_ms = 0 # immediate close (drop not sent messages) - if socket is not None: - socket.close(linger=socket_close_timeout_ms) - except zmq.error.ZMQError as e: - CLIENT_LOGGER.error(f"Error occurred during closing socket (e: {e}).") - - try: - if zmq_context is not None: - zmq_context.term() - except zmq.error.ZMQError as e: - CLIENT_LOGGER.error(f"Error occurred during closing zmq context (e: {e}).") - - CLIENT_LOGGER.debug(f"Socket for {name} closed") - self._shutdown_event.clear() - self._loop = None - CLIENT_LOGGER.debug(f"Leaving {name}") - - @property - def name(self) -> str: - """Get name of RequestsServerClient. - - Returns: - name of RequestsServerClient - """ - return self._name - - @property - def loop(self) -> asyncio.AbstractEventLoop: - """Get asyncio loop for RequestsServerClient. - - Returns: - asyncio.AbstractEventLoop: asyncio loop for RequestsServerClient - """ - return self._loop - - async def _handle_requests(self, scope, requests_payload, send): - try: - await self._handle_requests_fn(scope, requests_payload, send) - # except PyTritonUnrecoverableError: - # error = traceback.format_exc() - # responses = InferenceHandlerResponses(error=error) - # CLIENT_LOGGER.error( - # "Unrecoverable error thrown during calling model callable. " - # "Shutting down Triton Inference Server. 
" - # f"{error}" - # ) - # self.stopped = True - # self._notify_proxy_backend_observers(InferenceHandlerEvent.UNRECOVERABLE_ERROR, error) - # CLIENT_LOGGER.debug(f"Send response to proxy model for {model_name}.") - # send(responses.as_bytes()) - except Exception: - error = traceback.format_exc() - flags = PyTritonResponseFlags.ERROR | PyTritonResponseFlags.EOS - await send(scope, flags, error.encode()) - CLIENT_LOGGER.error(f"Error occurred during handling requests {scope['requests_id'].hex()}\n{error}") - finally: - async with self._handle_requests_tasks_condition: - self._handle_requests_tasks.pop(scope["requests_id"], None) - self._handle_requests_tasks_condition.notify() - CLIENT_LOGGER.debug(f"Finished handling requests {scope['requests_id'].hex()}") - - async def _send(self, socket, scope, flags, requests_payload): - """Send requests to RequestsServer. - - Args: - socket: socket for sending requests - scope: scope for sending requests - flags: flags for sending requests - requests_payload: payload of requests - """ - flags = flags.to_bytes(1, "big") - await socket.send_multipart([scope["requests_id"], flags, requests_payload]) - - -class HandshakeServer(threading.Thread): - """Handshake server for passing config.""" - - def __init__(self, socket_path: pathlib.Path, inference_handler_config) -> None: - """Initialize HandshakeServer. - - Args: - socket_path: path to socket - inference_handler_config: config for InferenceHandler - """ - super().__init__(daemon=True, name="handshake-server") - self._socket_path = socket_path - try: - self._config_payload = json.dumps(inference_handler_config).encode() - except TypeError: - raise ValueError(f"InferenceHandler config is not serializable: {inference_handler_config}") from None - - self._server = None - self._error_from_thread = None - - def start(self): - """Start HandshakeServer. 
- - Raises: - RuntimeError: if HandshakeServer is already running or error occurred during starting - """ - if self._server: - raise RuntimeError("HandshakeServer is already running") - - super().start() - while self._server is None and not self._error_from_thread: - time.sleep(0.001) - if self._error_from_thread is not None: - raise self._error_from_thread - - def run(self): - """Run HandshakeServer.""" - asyncio.run(self._run()) - - async def _run(self): - try: - self._server = await asyncio.start_unix_server(self._handle_request, self._socket_path) - async with self._server: - try: - await self._server.serve_forever() - except asyncio.CancelledError: - pass - except Exception as e: - SERVER_LOGGER.error(f"Error occurred during running handshake server (e: {e})") - self._error_from_thread = e - - def close(self): - """Close HandshakeServer.""" - loop = self._server.get_loop() - loop_tasks = asyncio.all_tasks(loop=loop) - for task in loop_tasks: - loop.call_soon_threadsafe(task.cancel) - - self.join() - SERVER_LOGGER.debug("Closed handshake server") - - async def _handle_request(self, reader, writer): - peername = writer.get_extra_info("peername") - try: - request_name = await asyncio.wait_for(reader.readuntil(b"\n"), timeout=1.0) - - if request_name == b"get_config\n": - writer.write(len(self._config_payload).to_bytes(4, "big")) - writer.write(self._config_payload) - await writer.drain() - else: - SERVER_LOGGER.warning(f"Unknown request {request_name} from {peername}") - - except asyncio.TimeoutError: - SERVER_LOGGER.debug(f"Timeout occurred during handling request from {peername}") - except Exception as e: - SERVER_LOGGER.error(f"Error occurred during handling request from {peername} (e: {e})") - finally: - writer.close() - await writer.wait_closed() - - -def get_config_from_handshake_server(socket_path: pathlib.Path, timeout_s: float = 1.0) -> dict: - """Get config from handshake server. 
- - Args: - socket_path: path to socket - timeout_s: timeout for waiting for the response - - Returns: - config from handshake server - - Raises: - TimeoutError: if timeout occurred while waiting for the response - ValueError: if invalid JSON response from the server - """ - should_stop_before_s = time.time() + timeout_s - sock = None - try: - LOGGER.debug(f"Waiting for config file {socket_path}") - while not socket_path.exists() and time.time() < should_stop_before_s: - time.sleep(0.001) - - if not socket_path.exists(): - raise TimeoutError(f"Timeout occurred while waiting for config file {socket_path}") - - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.settimeout(max(0.0, should_stop_before_s - time.time())) - sock.connect(socket_path.as_posix()) - sock.sendall(b"get_config\n") - - sock.settimeout(max(0.0, should_stop_before_s - time.time())) - payload_size = sock.recv(4) - payload_size = int.from_bytes(payload_size, "big") - - sock.settimeout(max(0.0, should_stop_before_s - time.time())) - config_payload = sock.recv(payload_size) - config = json.loads(config_payload) - return config - except socket.timeout as e: - raise TimeoutError(f"Timeout occurred while waiting for config file {socket_path}") from e - except json.JSONDecodeError as e: - raise ValueError("Invalid JSON response from the server.") from e - finally: - if sock is not None: - sock.close() diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/data.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/data.py deleted file mode 100644 index d8201738fd45cb7729338c1adb40fbd5986aedd6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/data.py +++ /dev/null @@ -1,1133 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Communication utility module. - -It is used for interaction between model and proxy_backend. -""" - -import abc -import atexit -import base64 -import ctypes -import ctypes.util -import dataclasses -import fcntl -import gc -import json -import logging -import math -import multiprocessing.managers -import multiprocessing.popen_spawn_posix -import multiprocessing.shared_memory -import os -import pathlib -import signal -import struct -import threading -import time -import uuid -import weakref -from typing import Dict, List, Literal, Optional, Sequence, Tuple, Union - -import numpy as np - -from .telemetry import get_span_dict, start_span_from_remote -from .types import Request, Requests, Response, Responses - -LOGGER = logging.getLogger(__name__) - -PROTOCOL_VERSION = "3" - - -# copy from -# https://github.com/triton-inference-server/python_backend/blob/main/src/resources/triton_python_backend_utils.py - - -def _serialize_byte_tensor(tensor) -> bytes: - """Serializes a bytes tensor into a flat numpy array of length prepended bytes. - - The numpy array should use dtype of np.object_. 
For np.bytes_, - numpy will remove trailing zeros at the end of byte sequence and because - of this it should be avoided. - - Args: - tensor: The bytes tensor to serialize. - - Returns: - serialized array as bytes buffer. - - Raises: - UnicodeEncodeErrors: raised when try to cast to string of non-bytes items fails - """ - if tensor.size == 0: - return b"" - - # If the input is a tensor of string/bytes objects, then must flatten those - # into a 1-dimensional array containing the 4-byte byte size followed by the - # actual element bytes. All elements are concatenated together in "C" order. - assert (tensor.dtype == np.object_) or (tensor.dtype.type == np.bytes_) - flattened_ls = [] - total_len = 0 - for obj in np.nditer(tensor, flags=["refs_ok"], order="C"): - # If directly passing bytes to BYTES type, - # don't convert it to str as Python will encode the - # bytes which may distort the meaning - if tensor.dtype == np.object_ and not isinstance(obj.item(), bytes): - s = str(obj.item()).encode("utf-8") - else: - s = obj.item() - item_len = len(s) - flattened_ls.append(struct.pack(" np.ndarray: - """Deserializes an encoded bytes tensor into an numpy array of dtype of python objects. - - Args: - encoded_tensor : The encoded bytes tensor where each element has its length in - first 4 bytes followed by the content - dtype: The dtype of the numpy array to deserialize to. - order: The order of the numpy array to deserialize to. - - Returns: - The 1-D numpy array of type object containing the deserialized bytes in 'C' order. - """ - strs = [] - offset = 0 - val_buf = encoded_tensor - val_len = struct.unpack_from(" bytes: - header_format = _PARTIAL_HEADER_FORMAT + "Q" * len(shape) - dtype_descr = np.lib.format.dtype_to_descr(dtype) - assert ( - len(dtype_descr) <= _MAX_DTYPE_DESCR - ), f"dtype descr is too long; dtype_descr={dtype_descr} max={_MAX_DTYPE_DESCR}" - return struct.pack(header_format, dtype_descr.encode("utf-8"), order.encode("ascii"), len(shape), *shape) - - -def _unpack_header(header: bytes) -> Tuple[Tuple[int, ...], np.dtype, Literal["C", "F"]]: - shape_offset = struct.calcsize(_PARTIAL_HEADER_FORMAT) - dtype_descr, order, ndim = struct.unpack_from(_PARTIAL_HEADER_FORMAT, header, offset=0) - shape = struct.unpack_from("Q" * ndim, header, offset=shape_offset) - dtype = np.lib.format.descr_to_dtype(dtype_descr.decode("utf-8").rstrip("\x00")) - order = order.decode("ascii") - return shape, dtype, order - - -def serialize_numpy_with_struct_header(tensor: np.ndarray) -> List[Union[bytes, memoryview]]: - """Serialize numpy array to list of bytes and memoryviews. - - Args: - tensor: numpy array to serialize - - Returns: - List of data frames in form of bytes and memoryviews - """ - if tensor.dtype.hasobject: - data = _serialize_byte_tensor(tensor.ravel()) - order = "C" # as _serialize_byte_tensor returns C-ordered array - else: - if not tensor.data.contiguous: - tensor = np.ascontiguousarray(tensor) - data = tensor.data - order = "C" if tensor.flags.c_contiguous else "F" - - header = _pack_header(tensor.shape, tensor.dtype, order) - frames = [header, data] - return frames - - -def deserialize_numpy_with_struct_header(frames: List[Union[bytes, memoryview]]) -> np.ndarray: - """Deserialize numpy array from list of bytes and memoryviews. 
- - Args: - frames: List of data frames in form of bytes and memoryviews - - Returns: - numpy array - """ - header, data = frames - shape, dtype, order = _unpack_header(header) - if dtype.hasobject: - tensor = _deserialize_bytes_tensor(data, dtype).reshape(shape) - else: - tensor = np.ndarray(shape, dtype=dtype, buffer=data, order=order) - return tensor - - -def calc_serialized_size_of_numpy_with_struct_header(tensor: np.ndarray) -> List[int]: - """Calculate size of serialized numpy array. - - Args: - tensor: numpy array to serialize - - Returns: - List of sizes of data frames - """ - header_size = struct.calcsize(_PARTIAL_HEADER_FORMAT) + struct.calcsize("Q") * len(tensor.shape) - if tensor.dtype.hasobject: - items_sizes = [] - order = "C" if tensor.flags.c_contiguous else "F" - for obj in np.nditer(tensor, flags=["refs_ok"], order=order): - if tensor.dtype == np.object_ and not isinstance(obj.item(), bytes): - s = str(obj.item()).encode("utf-8") - else: - s = obj.item() - items_sizes.append(len(s)) - - # total_size + for size of each item + each item - data_size = struct.calcsize(" offset: - free_blocks.append( - BlockDescriptor(self.shared_memory.name, offset=offset, size=used_block.offset - offset) - ) - offset = used_block.offset + used_block.size - # if tail is free - if offset < total_size: - free_blocks.append(BlockDescriptor(self.shared_memory.name, offset=offset, size=total_size - offset)) - - self.free_blocks = free_blocks - self.max_free_block_size = max(block.size for block in self.free_blocks) if self.free_blocks else 0 - - def __contains__(self, block_id: str) -> bool: - with self.used_blocks_lock: - return any(block_id == block.id for block in self.used_blocks) # pytype: disable=attribute-error - - def __getitem__(self, block_id: str) -> BlockDescriptor: - with self.used_blocks_lock: - for block in self.used_blocks: - if block.id == block_id: # pytype: disable=attribute-error - return block - raise KeyError(f"Block with id {block_id} not found in segment {self.shared_memory.name}") - - def allocate(self, offset, byte_size): - block = BlockDescriptor(self.shared_memory.name, offset=offset, size=byte_size) - with self.used_blocks_lock: - self.used_blocks.append(block) - self.used_blocks.sort(key=lambda block: block.offset) - self._update_free_blocks() - return block - - def release(self, block: BlockDescriptor): - with self.used_blocks_lock: - self.used_blocks.remove(block) - self._update_free_blocks() - - -class _DataBlocksServer: - _instance = None - _cnt = 0 - _minimal_segment_size = 4096 # 4KB - - def __new__(cls): - if cls._instance is None: - cls._instance = super().__new__(cls) - return cls._instance - - def __init__(self): - # WAR: for some reason, the __init__ is called on each create of proxy object - if self._cnt == 1: - return - self._cnt += 1 - self._id = uuid.uuid4() # to verify that it is singleton across processes - self._segments = [] - self._segments_lock = threading.RLock() - atexit.register(self.close) - - def get_free_blocks(self, bytes_sizes: Sequence[int]) -> Sequence[str]: - tensors_ids = [] - with self._segments_lock: - for byte_size in bytes_sizes: - for segment in self._segments: - if segment.max_free_block_size >= byte_size: - for free_block in segment.free_blocks: - if free_block.size >= byte_size: - block = self._allocate_block(segment, free_block.offset, byte_size) - tensors_ids.append(block.id) # pytype: disable=attribute-error - break - else: - continue # If no suitable block was found, try the next segment - break # If a suitable block was 
found, don't try any more segments - else: # If no suitable block was found in any segment - new_segment_size = int( - max(self._minimal_segment_size, math.pow(2, math.ceil(math.log2(byte_size)))) - ) - block = self._allocate_block( - self._create_new_segment(new_segment_size), offset=0, byte_size=byte_size - ) - tensors_ids.append(block.id) # pytype: disable=attribute-error - return tensors_ids - - def release_block(self, block_id: str): - with self._segments_lock: - for segment in self._segments: - try: - block = segment[block_id] - segment.release(block) - return - except KeyError: - pass - raise KeyError(f"Block with id {block_id} not found in server") - - def _allocate_block(self, segment: _SharedMemorySegment, offset: int, byte_size: int) -> BlockDescriptor: - return segment.allocate(offset, byte_size) - - def _create_new_segment(self, segment_size): - segment = _SharedMemorySegment(segment_size) - self._segments.append(segment) - return segment - - def get_debug_status(self): - return { - "server_id": str(self._id), - "host_pid": multiprocessing.current_process().pid, - "segments": [ - { - "shared_memory": segment.shared_memory.name, - "used_blocks": [str(block) for block in segment.used_blocks], - } - for segment in self._segments - ], - } - - def close(self): - multiprocessing.util.debug(f"Closing server {self._id}") - with self._segments_lock: - while self._segments: - segment = self._segments.pop() - multiprocessing.util.debug(f"Closing and delete segment {segment.shared_memory.name}") - segment.shared_memory.close() - segment.shared_memory.unlink() - - -class BlocksStoreManager(multiprocessing.managers.BaseManager): - """Remote block store for storing and retrieving numpy arrays in/from shared memory.""" - - @classmethod - def _run_server(cls, registry, address, authkey, serializer, writer, initializer=None, initargs=()): - PR_SET_PDEATHSIG = 1 # noqa - libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True) - libc.prctl(PR_SET_PDEATHSIG, signal.SIGTERM) # terminate process when parent **thread** dies - - if bool(os.environ.get("PYTRITON_VIZTRACER")): - from viztracer import VizTracer # type: ignore # pytype: disable=import-error - - cls._tracer = VizTracer(log_async=True, log_gc=True, tracer_entries=10000000, pid_suffix=True) - cls._tracer.register_exit() - cls._tracer.start() - - super()._run_server( - registry, address, authkey, serializer, writer, initializer, initargs - ) # pytype: disable=attribute-error - - -class _DataBlocksServerProxy(multiprocessing.managers.BaseProxy): - def release_block(self, /, *args, **kwargs): - return self._callmethod("release_block", args, kwargs) - - def get_free_blocks(self, /, *args, **kwargs): - return self._callmethod("get_free_blocks", args, kwargs) - - def _get_debug_status(self, /, *args, **kwargs): - return self._callmethod("get_debug_status", args, kwargs) - - def close(self, /, *args, **kwargs): - return self._callmethod("close", args, kwargs) - - -BlocksStoreManager.register("blocks", _DataBlocksServer, proxytype=_DataBlocksServerProxy) - - -class _FileLock: - _locks = {} - - def __new__(cls, file_path): - if file_path not in cls._locks: - cls._locks[file_path] = super().__new__(cls) - return cls._locks[file_path] - - def __init__(self, file_path): - if hasattr(self, "_file_path"): - return - self._file_path = pathlib.Path(file_path) - self._file_lock = None - self._lock = threading.RLock() - atexit.register(self._clean) - - def __enter__(self): - self._file_lock = self._file_path.open("a") - 
fcntl.flock(self._file_lock.fileno(), fcntl.LOCK_EX) - self._lock.acquire() - - def __exit__(self, exc_type, exc_value, traceback): - fcntl.flock(self._file_lock.fileno(), fcntl.LOCK_UN) - self._lock.release() - - def _clean(self): - if self._file_lock is not None: - self._file_lock.close() - try: - self._file_path.unlink(missing_ok=True) - except OSError as e: - LOGGER.warning(f"Could not remove lock file {self._file_path}; {e}") - - -class _Popen(multiprocessing.popen_spawn_posix.Popen): - def _launch(self, process_obj): - # Modified version of multiprocessing.popen_spawn_posix.Popen._launch - import io - import os - from multiprocessing import context, resource_tracker, spawn, util - - tracker_fd = resource_tracker.getfd() - self._fds.append(tracker_fd) # pytype: disable=attribute-error - - # get prep_data + remove init_main_from* as they are not required for TensorStore process - prep_data = spawn.get_preparation_data(process_obj._name) - prep_data.pop("init_main_from_module", None) - prep_data.pop("init_main_from_path", None) - - fp = io.BytesIO() - context.set_spawning_popen(self) - try: - context.reduction.dump(prep_data, fp) # pytype: disable=module-attr - context.reduction.dump(process_obj, fp) # pytype: disable=module-attr - finally: - context.set_spawning_popen(None) - - parent_r = child_w = child_r = parent_w = None - try: - parent_r, child_w = os.pipe() - child_r, parent_w = os.pipe() - cmd = spawn.get_command_line(tracker_fd=tracker_fd, pipe_handle=child_r) - self._fds.extend([child_r, child_w]) # pytype: disable=attribute-error - self.pid = util.spawnv_passfds( - spawn.get_executable(), - cmd, - self._fds, # pytype: disable=attribute-error,wrong-arg-types - ) - self.sentinel = parent_r - with open(parent_w, "wb", closefd=False) as f: - f.write(fp.getbuffer()) - finally: - fds_to_close = [] - for fd in (parent_r, parent_w): - if fd is not None: - fds_to_close.append(fd) - self.finalizer = util.Finalize(self, util.close_fds, fds_to_close) # pytype: disable=module-attr - - for fd in (child_r, child_w): - if fd is not None: - os.close(fd) - - -class _SpawnProcess(multiprocessing.process.BaseProcess): - _start_method = "spawn" - - @staticmethod - def _Popen(process_obj): # noqa N802 - return _Popen(process_obj) - - -class _SpawnContext(multiprocessing.context.BaseContext): - _name = "spawn" - Process = _SpawnProcess - - -class TensorStore: - """Tensor store for storing and retrieving numpy arrays in/from shared memory.""" - - _SOCKET_EXISTANCE_CHECK_INTERVAL_S = 0.1 - _instances = {} - - def __new__(cls, *args, **kwargs): - """Create TensorStore object. If object with given address already exists, return it.""" - if args: - address = args[0] - elif "address" in kwargs: - address = kwargs["address"] - else: - raise TypeError("TensorStore() missing 1 required positional argument: 'address'") - - address = address.as_posix() if isinstance(address, pathlib.Path) else address - - if address not in cls._instances: - cls._instances[address] = super().__new__(cls) - - return cls._instances[address] - - def __init__(self, address: Union[str, pathlib.Path], auth_key: Optional[bytes] = None): - """Initialize TensorStore object. - - Args: - address: address of data store - auth_key: authentication key required to setup connection. 
If not provided, current process authkey will be used - """ - if not hasattr(self, "_remote_blocks_store_manager"): - address = address.as_posix() if isinstance(address, pathlib.Path) else address - self._remote_blocks_store_manager = BlocksStoreManager(address, authkey=auth_key, ctx=_SpawnContext()) - self._remote_blocks_store = None - self._manager_start_stop_filelock = _FileLock(f"{address}.lock") - - # container for keeping map between tensor_id and numpy array weak ref - self._handled_blocks: Dict[str, weakref.ReferenceType] = {} - self._handled_blocks_lock = threading.RLock() - - self._shm_segments: Dict[str, multiprocessing.shared_memory.SharedMemory] = {} - self._shm_segments_lock = threading.RLock() - - self.serialize = serialize_numpy_with_struct_header - self.deserialize = deserialize_numpy_with_struct_header - self._calc_serialized_tensor_size = calc_serialized_size_of_numpy_with_struct_header - - @property - def address(self) -> str: - """Return address of remote block store.""" - return self._remote_blocks_store_manager.address - - def start(self): - """Start remote block store.""" - with self._manager_start_stop_filelock: - if self._remote_blocks_store is not None: - raise RuntimeError("Remote block store is already started/connected") - - self._remote_blocks_store_manager.start() - self._remote_blocks_store = self._remote_blocks_store_manager.blocks() # pytype: disable=attribute-error - - address = pathlib.Path(self._remote_blocks_store_manager.address) - self._wait_for_address(address) - LOGGER.debug( - f"Started remote block store at {address} (pid={self._remote_blocks_store_manager._process.pid})" # pytype: disable=attribute-error - ) - - def connect(self, timeout_s: Optional[float] = None): - """Connect to remote block store.""" - if self._remote_blocks_store is None: - address = pathlib.Path(self._remote_blocks_store_manager.address) - - self._wait_for_address(address, timeout_s) - self._remote_blocks_store_manager.connect() - self._remote_blocks_store = self._remote_blocks_store_manager.blocks() # pytype: disable=attribute-error - LOGGER.debug(f"Connected to remote block store at {address})") - else: - LOGGER.debug(f"Already connectd to remote block store at {self.address}") - - def _wait_for_address(self, address, timeout_s: Optional[float] = None): - should_stop_at = time.time() + timeout_s if timeout_s is not None else None - if timeout_s is not None and self._SOCKET_EXISTANCE_CHECK_INTERVAL_S > timeout_s: - socket_existance_check_interval = timeout_s - else: - socket_existance_check_interval = self._SOCKET_EXISTANCE_CHECK_INTERVAL_S - - while not address.exists(): - if should_stop_at is not None and time.time() >= should_stop_at: - raise TimeoutError(f"Timeout while waiting for {address} to be created") - time.sleep(socket_existance_check_interval) - - def _calc_serialized_size(self, tensor: np.ndarray) -> int: - # frames payload sum + total size + frames sizes - # assume 2 frames: header with tensor description + data - return sum(self._calc_serialized_tensor_size(tensor)) + struct.calcsize(" Sequence[str]: - """Append tensor to shared memory buffer. 
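# Illustrative sketch (not from the original sources): put() sizes each shared
# memory block from the frame payloads plus a total-size field and one size
# field per frame, and _copy_frames()/_handle_frames() read and write that
# container. Assuming 4-byte little-endian size fields, the layout is roughly
#   [total_size][frame0_size][frame0 bytes][frame1_size][frame1 bytes]
# and a standalone writer/reader over a plain bytearray illustrates the idea:
import struct


def write_frames(buf: bytearray, offset: int, frames: list) -> int:
    total = struct.calcsize("<I") * len(frames) + sum(len(f) for f in frames)
    struct.pack_into("<I", buf, offset, total)
    offset += struct.calcsize("<I")
    for frame in frames:
        struct.pack_into("<I", buf, offset, len(frame))
        offset += struct.calcsize("<I")
        buf[offset:offset + len(frame)] = frame
        offset += len(frame)
    return offset


def read_frames(buf: bytes, offset: int) -> list:
    (total,) = struct.unpack_from("<I", buf, offset)
    end = offset + struct.calcsize("<I") + total
    offset += struct.calcsize("<I")
    frames = []
    while offset < end:
        (size,) = struct.unpack_from("<I", buf, offset)
        offset += struct.calcsize("<I")
        frames.append(bytes(buf[offset:offset + size]))
        offset += size
    return frames


if __name__ == "__main__":
    storage = bytearray(256)
    write_frames(storage, 0, [b"header-bytes", b"tensor-data"])
    assert read_frames(storage, 0) == [b"header-bytes", b"tensor-data"]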
- - Args: - tensors: numpy arrays to store - - Returns: - List of ids of stored tensors - """ - byte_size_of_frames_containers = [self._calc_serialized_size(tensor) for tensor in tensors] - tensors_ids = self._remote_blocks_store.get_free_blocks(byte_size_of_frames_containers) - blocks = [BlockDescriptor.from_id(tensor_id) for tensor_id in tensors_ids] - - for tensor, block in zip(tensors, blocks): - with self._shm_segments_lock: - shm = self._shm_segments.get(block.shm_name) - if shm is None: - shm = multiprocessing.shared_memory.SharedMemory(block.shm_name, create=False) - self._shm_segments[block.shm_name] = shm - - frames = self.serialize(tensor) - self._copy_frames(frames, shm, block.offset) - - return tensors_ids - - def get(self, tensor_id: str) -> np.ndarray: - """Get numpy array from tensor store. - - Args: - tensor_id: id of of tenosr to get - - Returns: - numpy array - """ - tensor = None - # try to handle already handled tensor from weakref - with self._handled_blocks_lock: - tensor_ref = self._handled_blocks.get(tensor_id) - if tensor_ref is not None: - tensor = tensor_ref() - - if tensor is None: # if tensor was not handled yet or weakref is already empty - block = BlockDescriptor.from_id(tensor_id) - - # check if shm segment is already opened - with self._shm_segments_lock: - shm = self._shm_segments.get(block.shm_name) - - # if not open it and put into cache - if shm is None: - shm = multiprocessing.shared_memory.SharedMemory(block.shm_name, create=False) - with self._shm_segments_lock: - shm = self._shm_segments.setdefault(block.shm_name, shm) # in meantime other thread could create it - - frames = self._handle_frames(shm, block.offset) - tensor = self.deserialize(frames) - - # store tensor in weakref to be able to release shared memory when tensor will be garbage collected - with self._handled_blocks_lock: - tensor_ref = self._handled_blocks.setdefault(tensor_id, weakref.ref(tensor)) - tensor = tensor_ref() - - return tensor # pytype: disable=bad-return-type - - def release_block(self, tensor_id: str): - """Release shared memory block. - - Args: - tensor_id: id of tensor to release - """ - tensor_ref = None - with self._handled_blocks_lock: - tensor_ref = self._handled_blocks.pop(tensor_id, None) - - try: - if tensor_ref is not None: - self._remote_blocks_store.release_block(tensor_id) - except OSError: # thrown when remote process is already closed - LOGGER.warning( - f"Failed to release block {tensor_id} on remote process at {self.address}. Probably remote process is already closed" - ) - - def _copy_frames( - self, - frames: List[Union[bytes, memoryview]], - shm: multiprocessing.shared_memory.SharedMemory, - offset: int, - ) -> int: - total_size = struct.calcsize(" List[memoryview]: - frames = [] - (total_size,) = struct.unpack_from(" bool: - """Check if remote block store was started by this instance. - - Returns: - True if remote block store was started by this instance, False otherwise - """ - return hasattr(self._remote_blocks_store_manager, "shutdown") - - -def get_debug_status(tensor_store: TensorStore) -> dict: - """Get debug status of remote block store. 
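# Illustrative sketch (not from the original sources): the typical life cycle of
# the store as the proxy uses it. One side starts the block-server process, the
# other connects with the shared authkey, and tensors then travel by block id
# instead of by value. The socket path is a made-up example value; TensorStore
# is the class defined above in this module.
import multiprocessing

import numpy as np


def tensor_store_round_trip():
    store = TensorStore("/tmp/example-data.sock", multiprocessing.current_process().authkey)
    store.start()  # the peer process would call store.connect(timeout_s=5.0) instead
    try:
        (tensor_id,) = store.put([np.arange(10, dtype=np.int64)])
        restored = store.get(tensor_id)  # for non-object dtypes this is a view into shared memory
        assert restored.sum() == 45
        store.release_block(tensor_id)   # hand the block back to the block server
    finally:
        store.close()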
- - Args: - tensor_store: TensorStore object - - Returns: - Debug status of remote block store - """ - if tensor_store._remote_blocks_store is None: - raise RuntimeError("Remote block store is not initialized") - - return tensor_store._remote_blocks_store._get_debug_status() - - -class BaseRequestsResponsesSerializerDeserializer(abc.ABC): - """Base class for requests/responses serializer/deserializer.""" - - @abc.abstractmethod - def serialize_requests(self, requests: Requests) -> bytes: - """Serialize requests. - - Args: - requests: list of requests to serialize - - Returns: - Serialized requests - """ - pass - - @abc.abstractmethod - def deserialize_requests(self, requests_payload: bytes) -> Requests: - """Deserialize requests. - - Args: - requests_payload: serialized requests - - Returns: - List of deserialized requests - """ - pass - - @abc.abstractmethod - def free_requests_resources(self, requests_payload: bytes): - """Free resources used by requests.""" - pass - - @abc.abstractmethod - def serialize_responses(self, responses: Responses) -> bytes: - """Serialize responses. - - Args: - responses: list of responses to serialize - - Returns: - Serialized responses - """ - pass - - @abc.abstractmethod - def deserialize_responses(self, responses_payload: bytes) -> Responses: - """Deserialize responses. - - Args: - responses_payload: serialized responses - - Returns: - List of deserialized responses - """ - pass - - @abc.abstractmethod - def free_responses_resources(self, responses_payload: bytes): - """Free resources used by responses.""" - pass - - -class Base64SerializerDeserializer(BaseRequestsResponsesSerializerDeserializer): - """Serializer/deserializer for requests/responses using base64 implementation.""" - - def serialize_requests(self, requests: Requests) -> bytes: - """Serialize requests. - - Args: - requests: list of requests to serialize - - Returns: - Serialized requests - """ - serialized_requests = self._serialize_named_tensors_lists(requests) - requests_list = [] - for request, serialized_request in zip(requests, serialized_requests): - serialized_request = {"data": serialized_request, "parameters": request.parameters} - if request.span is not None: - serialized_request["span"] = get_span_dict(request.span) - requests_list.append(serialized_request) - - requests = {"requests": requests_list} - requests = json.dumps(requests).encode("utf-8") - return requests - - def deserialize_requests(self, requests_payload: bytes) -> Requests: - """Deserialize requests. - - Args: - requests_payload: serialized requests - - Returns: - List of deserialized requests - """ - requests = json.loads(requests_payload) - requests_data = [request["data"] for request in requests["requests"]] - requests_data = self._deserialized_named_tensors_lists(requests_data) - - deserialized_requests = [] - for request, request_data in zip(requests["requests"], requests_data): - kwargs = {"data": request_data, "parameters": request.get("parameters")} - # FIXME: move span creation above just after json.loads - if "span" in request: - span_dict = request["span"] - span = start_span_from_remote(span_dict, "proxy_inference_callable") - kwargs["span"] = span - request_wrapped = Request(**kwargs) - deserialized_requests.append(request_wrapped) - - return deserialized_requests - - def free_requests_resources(self, requests_payload: bytes): - """Free resources used by requests.""" - pass - - def serialize_responses(self, responses: Responses) -> bytes: - """Serialize responses. 
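# Illustrative sketch (not from the original sources): with the base64
# implementation every tensor is shipped inline. Each frame produced by
# serialize_numpy_with_struct_header() is base64-encoded under its input name,
# so a serialized payload is plain JSON along the lines of
#   {"requests": [{"data": {"INPUT_1": ["<b64 header>", "<b64 data>"]},
#                  "parameters": {"custom": "value"}}]}
# (names and values are example placeholders). Rebuilding it by hand:
import base64
import json


def decode_request_payload(payload: bytes) -> list:
    decoded = []
    for request in json.loads(payload)["requests"]:
        tensors = {}
        for name, frames_b64 in request["data"].items():
            frames = [base64.b64decode(frame.encode("utf-8")) for frame in frames_b64]
            tensors[name] = deserialize_numpy_with_struct_header(frames)
        decoded.append({"data": tensors, "parameters": request.get("parameters")})
    return decoded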
- - Args: - responses: list of responses to serialize - - Returns: - Serialized responses - """ - responses = self._serialize_named_tensors_lists(responses) - responses = {"responses": [{"data": response} for response in responses]} - return json.dumps(responses).encode("utf-8") - - def deserialize_responses(self, responses_payload: bytes) -> Responses: - """Deserialize responses. - - Args: - responses_payload: serialized responses - - Returns: - List of deserialized responses - """ - if responses_payload: - responses = json.loads(responses_payload) - responses = [response["data"] for response in responses["responses"]] - responses = self._deserialized_named_tensors_lists(responses) - return [Response(data=response) for response in responses] - else: - return [] - - def free_responses_resources(self, responses_payload: bytes): - """Free resources used by responses.""" - pass - - def _serialize_named_tensors_lists(self, named_tensors_lists): - def _encode(_tensor): - frames = serialize_numpy_with_struct_header(_tensor) - return [base64.b64encode(frame).decode("utf-8") for frame in frames] - - return [ - {tensor_name: _encode(tensor) for tensor_name, tensor in tensors.items()} for tensors in named_tensors_lists - ] - - def _deserialized_named_tensors_lists(self, named_tensors_lists): - def _decode(decoded_tensor): - frames = [base64.b64decode(frame.encode("utf-8")) for frame in decoded_tensor] - return deserialize_numpy_with_struct_header(frames) - - return [ - {tensor_name: _decode(encoded_tensor) for tensor_name, encoded_tensor in tensors.items()} - for tensors in named_tensors_lists - ] - - def start(self, url: Union[str, pathlib.Path], authkey: Optional[bytes] = None): - """Start Dummy implementation. - - Args: - url: address of data store - authkey: authentication key required to setup connection. If not provided, current process authkey will be used - """ - pass - - def connect(self, url: Union[str, pathlib.Path], authkey: Optional[bytes] = None): - """Connect to Dummy implementation. - - Args: - url: address of data store - authkey: authentication key required to setup connection. If not provided, current process authkey will be used - """ - pass - - def close(self): - """Close Dummy implementation.""" - pass - - -class TensorStoreSerializerDeserializer(BaseRequestsResponsesSerializerDeserializer): - """Serializer/deserializer for requests/responses using TensorStore.""" - - def __init__(self): - """Initialize TensorStoreSerializerDeserializer object.""" - self._tensor_store = None - - def serialize_requests(self, requests: Requests) -> bytes: - """Serialize requests. - - Args: - requests: list of requests to serialize - - Returns: - Serialized requests - """ - serialized_requests = self._serialize_named_tensors_lists(requests) - requests_list = [] - for request, serialized_request in zip(requests, serialized_requests): - serialized_request = {"data": serialized_request, "parameters": request.parameters} - if request.span is not None: - serialized_request["span"] = get_span_dict(request.span) - requests_list.append(serialized_request) - - requests = {"requests": requests_list} - return json.dumps(requests).encode("utf-8") - - def deserialize_requests(self, requests_payload: bytes) -> Requests: - """Deserialize requests. 
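# Illustrative sketch (not from the original sources): the TensorStore-backed
# serializer keeps the JSON payload the same shape as the base64 variant, but
# the "data" values are shared-memory block ids rather than inline frames, e.g.
#   {"requests": [{"data": {"INPUT_1": "<shared-memory block id>"},
#                  "parameters": {"custom": "value"}}]}
# (ids and names are example placeholders), so deserialization becomes a store
# lookup instead of a decode:
import json


def resolve_request_payload(payload: bytes, tensor_store) -> list:
    return [
        {
            "data": {name: tensor_store.get(tensor_id) for name, tensor_id in request.get("data", {}).items()},
            "parameters": request.get("parameters"),
        }
        for request in json.loads(payload)["requests"]
    ]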
- - Args: - requests_payload: serialized requests - - Returns: - List of deserialized requests - """ - requests = json.loads(requests_payload) - deserialized_requests = [] - for request in requests["requests"]: - kwargs = {} - if "span" in request: - span_dict = request["span"] - span = start_span_from_remote(span_dict, "proxy_inference_callable") - kwargs["span"] = span - request_data = { - input_name: self._tensor_store.get(tensor_id) - for input_name, tensor_id in request.get("data", {}).items() - } - kwargs["data"] = request_data - kwargs["parameters"] = request.get("parameters") - request_wrapped = Request(**kwargs) - deserialized_requests.append(request_wrapped) - - return deserialized_requests - - def free_requests_resources(self, requests_payload: bytes): - """Free resources used by requests.""" - if requests_payload: - requests = json.loads(requests_payload) - for response in requests["requests"]: - for _, tensor_id in response.get("data", {}).items(): - self._tensor_store.release_block(tensor_id) - - def serialize_responses(self, responses: Responses) -> bytes: - """Serialize responses. - - Args: - responses: list of responses to serialize - - Returns: - Serialized responses - """ - responses = self._serialize_named_tensors_lists(responses) - responses = {"responses": [{"data": response} for response in responses]} - return json.dumps(responses).encode("utf-8") - - def deserialize_responses(self, responses_payload: bytes) -> Responses: - """Deserialize responses. - - Args: - responses_payload: serialized responses - - Returns: - List of deserialized responses - """ - if responses_payload: - responses = json.loads(responses_payload) - return [ - Response( - data={ - input_name: self._tensor_store.get(tensor_id) - for input_name, tensor_id in response.get("data", {}).items() - } - ) - for response in responses["responses"] - ] - else: - return [] - - def free_responses_resources(self, responses_payload: bytes): - """Free resources used by responses.""" - if responses_payload: - responses = json.loads(responses_payload) - for response in responses["responses"]: - for _, tensor_id in response.get("data", {}).items(): - self._tensor_store.release_block(tensor_id) - - def _serialize_named_tensors_lists(self, named_tensors_lists): - values_with_coords = [ - (idx, tensor_name, tensor) - for idx, tensors in enumerate(named_tensors_lists) - for tensor_name, tensor in tensors.items() - ] - tensor_ids = self._tensor_store.put([tensor for _, _, tensor in values_with_coords]) - named_tensors_lists = [{} for _ in range(len(named_tensors_lists))] - for (idx, tensor_name, _), tensor_id in zip(values_with_coords, tensor_ids): - named_tensors_lists[idx][tensor_name] = tensor_id - - return named_tensors_lists - - def start(self, url: Union[str, pathlib.Path], authkey: Optional[bytes] = None): - """Start TensorStore. - - Args: - url: address of data store - authkey: authentication key required to setup connection. If not provided, current process authkey will be used - """ - self._tensor_store = self._create(url, authkey) - self._tensor_store.start() - - def connect(self, url: Union[str, pathlib.Path], authkey: Optional[bytes] = None): - """Connect to TensorStore. - - Args: - url: address of data store - authkey: authentication key required to setup connection. 
If not provided, current process authkey will be used - """ - self._tensor_store = self._create(url, authkey) - self._tensor_store.connect() - - def _create(self, url: Union[str, pathlib.Path], authkey: Optional[bytes] = None): - authkey = authkey or multiprocessing.current_process().authkey - return TensorStore(url, authkey) - - def close(self): - """Close TensorStore.""" - if self._tensor_store: - # check if run by this serializer/deserializer - if self._tensor_store.is_started(): - debug_status = get_debug_status(self._tensor_store) - used_blocks = [block for segment in debug_status["segments"] for block in segment["used_blocks"]] - if used_blocks: - LOGGER.debug(f"TensorStore used blocks while closing: {used_blocks}") - # raise RuntimeError( - # f"TensorStore at {self._tensor_store.address} is still running. Used blocks: {used_blocks}" - # ) - LOGGER.debug(f"Closing TensorStore process at {self._tensor_store.address}") - - self._tensor_store.close() - self._tensor_store = None diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/inference.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/inference.py deleted file mode 100644 index dccb2c61d2dae317e06bbd62e25c6d9fcd9aac62..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/inference.py +++ /dev/null @@ -1,455 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module with classes for executing received requests on model callables and sending responses back to Triton.""" - -import abc -import asyncio -import concurrent.futures -import enum -import inspect -import logging -import threading -import time -import traceback -import typing - -from pytriton.exceptions import PyTritonUnrecoverableError -from pytriton.proxy.communication import PyTritonResponseFlags, RequestsServerClient -from pytriton.proxy.telemetry import end_span -from pytriton.proxy.types import Requests, Responses, ResponsesNoneOrError, Scope -from pytriton.proxy.validators import TritonResultsValidator - -LOGGER = logging.getLogger(__name__) - - -class InferenceHandlerEvent(enum.Enum): - """Represents proxy backend event.""" - - STARTED = "started" - CLOSING = "closing" - CLOSED = "closed" - UNRECOVERABLE_ERROR = "unrecoverable-error" - - -InferenceHandlerEventsHandler = typing.Callable[ - ["InferenceHandler", InferenceHandlerEvent, typing.Optional[typing.Any]], None -] - - -class _AsyncGenForCallableAdapter: - """Adapter for converting a callable to an async generator.""" - - def __new__(cls, inference_callable): - """Create an async generator from a callable. - - Args: - inference_callable: A callable to convert to an async generator. 
- """ - if ( - callable(inference_callable) - and not inspect.isfunction(inference_callable) - and not inspect.ismethod(inference_callable) - ): - inference_callable = inference_callable.__call__ - - if inspect.isasyncgenfunction(inference_callable): - return inference_callable - elif inspect.iscoroutinefunction(inference_callable): - - async def _callable(requests): - yield await inference_callable(requests) - - return _callable - elif inspect.isgeneratorfunction(inference_callable): - - async def _callable(requests): - for result in inference_callable(requests): - yield result - await asyncio.sleep(0) - - return _callable - else: - - async def _callable(requests): - yield inference_callable(requests) - await asyncio.sleep(0) - - return _callable - - -class BaseRequestsResponsesConnector(abc.ABC): - """Base class for requests responses connector.""" - - @abc.abstractmethod - def register_inference_hook(self, run_inference_fn: typing.Callable[[Scope, Requests], concurrent.futures.Future]): - """Register inference hook. - - Args: - run_inference_fn: function to run inference on requests - """ - pass - - @abc.abstractmethod - def unregister_inference_hook( - self, run_inference_fn: typing.Callable[[Scope, Requests], concurrent.futures.Future] - ): - """Unregister inference hook. - - Args: - run_inference_fn: function to run inference on requests - """ - pass - - @abc.abstractmethod - def send(self, scope: Scope, flags: PyTritonResponseFlags, responses: ResponsesNoneOrError): - """Send responses back to server. - - Args: - scope: scope of the requests - flags: flags for responses - responses: responses to send back to server - """ - pass - - -class RequestsResponsesConnector(threading.Thread, BaseRequestsResponsesConnector): - """Thread for handling requests received from Triton.""" - - INFERENCE_FN_REGISTER_WAIT_TIME_S = 5 - - def __init__(self, url: str, serializer_deserializer): - """Requests Server Client thread. - - Args: - url: url of the requests server - serializer_deserializer: serializer and deserializer for requests and responses - """ - self._requests_server_client = RequestsServerClient(url, self.handle_requests) - self._serializer_deserializer = serializer_deserializer - - self._responses_queues: typing.Dict[bytes, asyncio.Queue] = {} - self._run_inference_fn: typing.Optional[typing.Callable[[Scope, Requests], concurrent.futures.Future]] = None - self._run_inference_condition = threading.Condition() - - super().__init__(daemon=True, name=f"{self._requests_server_client._name}-comm_thread") - - def run(self): - """Requests Server Client thread run method.""" - self._requests_server_client.run() - - def close(self): - """Close Requests Server Client thread.""" - self._requests_server_client.shutdown() - - async def handle_requests(self, scope, requests_payload: bytes, send): - """Handle requests received from Triton. 
- - Args: - scope: scope of the requests - requests_payload: requests payload to handle - send: function to send responses back to Triton - - Returns: - None - - Raises: - Exception: if an error occurs while handling requests - """ - requests_id = scope["requests_id"] - queue = self._responses_queues[requests_id] = asyncio.Queue() - loop = asyncio.get_running_loop() - requests = None - - def _wait_for_inference_fn(timeout_s: float): - with self._run_inference_condition: - return self._run_inference_condition.wait_for( - lambda: self._run_inference_fn is not None, timeout=timeout_s - ) - - try: - requests = self.preprocess(scope, requests_payload) - - if self._run_inference_fn is None: - await loop.run_in_executor(None, _wait_for_inference_fn, self.INFERENCE_FN_REGISTER_WAIT_TIME_S) - - with self._run_inference_condition: - if self._run_inference_fn is None: - raise RuntimeError("Inference callable is not registered (inference handler is stopped)") - - run_inference_future = self._run_inference_fn(scope, requests) - while True: - (flags, responses_or_error) = await queue.get() - if flags & PyTritonResponseFlags.ERROR: - error_msg = "".join( - traceback.format_exception(None, responses_or_error, responses_or_error.__traceback__) - ) - error_msg = error_msg.encode() - await send(scope, flags, error_msg) - break - - responses_payload = self.postprocess(scope, responses_or_error) - - await send(scope, flags, responses_payload) - if flags & PyTritonResponseFlags.EOS: - break - - run_inference_future.result() - except Exception: - error_msg = traceback.format_exc().encode() - flags = PyTritonResponseFlags.ERROR | PyTritonResponseFlags.EOS - await send(scope, flags, error_msg) - finally: - if requests is not None: - for request in requests: - end_span(request.span) - - self._serializer_deserializer.free_requests_resources(requests_payload) - self._responses_queues.pop(requests_id) - LOGGER.debug(f"Finished handling requests for {scope['requests_id'].hex()}") - - def preprocess(self, scope: Scope, requests_payload: bytes) -> Requests: - """Preprocess requests before running inference on them. - - Currently, this method only deserializes requests. - - Args: - scope: scope of the requests - requests_payload: requests payload to preprocess - - Returns: - deserialized requests - """ - return self._serializer_deserializer.deserialize_requests(requests_payload) - - def postprocess(self, scope: Scope, responses: Responses) -> bytes: - """Postprocess responses before sending them back to Triton. - - Currently, this method only serializes responses. - - Args: - scope: scope of the requests - responses: responses to postprocess - - Returns: - serialized responses - """ - if responses is None: - return b"" - else: - return self._serializer_deserializer.serialize_responses(responses) - - def register_inference_hook(self, run_inference_fn: typing.Callable[[Scope, Requests], concurrent.futures.Future]): - """Register inference hook. - - Args: - run_inference_fn: function to run inference on requests - """ - with self._run_inference_condition: - self._run_inference_fn = run_inference_fn - self._run_inference_condition.notify_all() - - def unregister_inference_hook( - self, run_inference_fn: typing.Callable[[Scope, Requests], concurrent.futures.Future] - ): - """Unregister inference hook. 
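# Illustrative sketch (not from the original sources): responses travel back
# through an asyncio.Queue as (flags, payload) pairs. Regular messages carry
# empty flags, the final message carries EOS, and a failure carries ERROR | EOS
# with the exception in place of a payload. A stripped-down consumer of that
# protocol, using the PyTritonResponseFlags imported at the top of this module:
import asyncio


async def drain_responses(queue: "asyncio.Queue") -> list:
    """Collect streamed responses until EOS; re-raise if ERROR is flagged."""
    collected = []
    while True:
        flags, responses_or_error = await queue.get()
        if flags & PyTritonResponseFlags.ERROR:
            raise responses_or_error
        if responses_or_error is not None:
            collected.append(responses_or_error)
        if flags & PyTritonResponseFlags.EOS:
            return collected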
- - Args: - run_inference_fn: function to run inference on requests - """ - with self._run_inference_condition: - self._run_inference_fn = None - self._run_inference_condition.notify_all() - - def send(self, scope: Scope, flags: PyTritonResponseFlags, responses: ResponsesNoneOrError): - """Send responses back to server. - - Args: - scope: scope of the requests - flags: flags for responses - responses: responses to send back to server - """ - requests_id = scope["requests_id"] - queue = self._responses_queues[requests_id] - loop = self._requests_server_client.loop - # use no_wait as there is no limit for responses queues - loop.call_soon_threadsafe(queue.put_nowait, (flags, responses)) - - -class InferenceHandler(threading.Thread): - """Thread for running inference on requests.""" - - def __init__( - self, - model_callable: typing.Callable, - requests_responses_connector: BaseRequestsResponsesConnector, - validator: TritonResultsValidator, - name: typing.Optional[str] = None, - ): - """Inference Handler thread. - - Args: - model_callable: model callable to run inference on requests - requests_responses_connector: requests responses connector - validator: validator for requests and responses - name: name of the thread for easy of debugging - """ - self._model_callable = _AsyncGenForCallableAdapter(model_callable) - self._requests_responses_connector = requests_responses_connector - self._validator = validator - - self._loop = None - self._loop_condition = threading.Condition() - self._inference_handler_events_observers: typing.List[InferenceHandlerEventsHandler] = [] - self._wait_for_schechuled_tasks_timeout_s = 20.0 - - name = name or "inference_handler" - super().__init__(daemon=True, name=name) - - def run(self): - """Inference Handler thread run method.""" - with self._loop_condition: - self._loop = asyncio.new_event_loop() - self._loop_condition.notify_all() - - asyncio.set_event_loop(self._loop) - - try: - self._notify_inference_handler_events_observers(InferenceHandlerEvent.STARTED, None) - self._requests_responses_connector.register_inference_hook(self.run_inference) - self._loop.run_forever() - finally: - self._notify_inference_handler_events_observers(InferenceHandlerEvent.CLOSING, None) - try: - _cancel_all_tasks(self._loop) - self._loop.run_until_complete(self._loop.shutdown_asyncgens()) - self._loop.run_until_complete(self._loop.shutdown_default_executor()) - finally: - asyncio.set_event_loop(None) - self._loop.close() - - self._notify_inference_handler_events_observers(InferenceHandlerEvent.CLOSED, None) - - def start(self): - """Start Inference Handler.""" - super().start() - with self._loop_condition: - small_timeout_s = 5 - self._loop_condition.wait_for(lambda: self._loop is not None, timeout=small_timeout_s) - - def stop(self) -> None: - """Stop Inference Handler.""" - LOGGER.info("Closing Inference Handler") - self._requests_responses_connector.unregister_inference_hook(self.run_inference) - if self._loop is not None: - try: - _wait_for_scheduled_tasks( - self._loop, self._handle_requests.__name__, timeout_s=self._wait_for_schechuled_tasks_timeout_s - ) - except TimeoutError: - LOGGER.warning( - "Timeout while waiting for submitted inference tasks to finish. Cancelling remaining tasks." - ) - self._loop.call_soon_threadsafe(self._loop.stop) - - def on_inference_handler_event(self, inference_handler_events_handle_fn: InferenceHandlerEventsHandler): - """Register InferenceHandlerEventsHandler callable. 
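# Illustrative sketch (not from the original sources): observers registered via
# on_inference_handler_event() receive the handler, the event, and an optional
# context (for UNRECOVERABLE_ERROR the context is the formatted traceback).
# A minimal logging observer and how it would be attached; the model_callable,
# connector and validator names in the commented usage are placeholders:
import logging
import typing


def log_inference_handler_event(
    handler: "InferenceHandler",
    event: InferenceHandlerEvent,
    context: typing.Optional[typing.Any],
) -> None:
    if event == InferenceHandlerEvent.UNRECOVERABLE_ERROR:
        logging.getLogger(__name__).error("Inference handler failed: %s", context)
    else:
        logging.getLogger(__name__).info("Inference handler event: %s", event.value)


# inference_handler = InferenceHandler(model_callable, connector, validator, name="instance_0")
# inference_handler.on_inference_handler_event(log_inference_handler_event)
# inference_handler.start()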
- - Args: - inference_handler_events_handle_fn: function to be called when inference handler event arises - """ - self._inference_handler_events_observers.append(inference_handler_events_handle_fn) - - def run_inference(self, scope: Scope, requests: Requests): - """Run inference on requests. - - Args: - scope: scope of the requests - requests: requests to run inference on - - Returns: - Future of inference task - """ - return asyncio.run_coroutine_threadsafe(self._handle_requests(scope, requests), self._loop) - - async def _handle_requests(self, scope: Scope, requests: Requests): - requests_id = scope["requests_id"] - LOGGER.debug(f"Performing inference on requests={requests_id.hex()}") - - responses = None - try: - async for responses in self._model_callable(requests): - self._validator.validate_responses(requests, responses) - self._requests_responses_connector.send(scope, PyTritonResponseFlags(0), responses) - self._requests_responses_connector.send(scope, PyTritonResponseFlags.EOS, None) - except (Exception, asyncio.CancelledError) as e: - error_msg = traceback.format_exc() - if isinstance(e, PyTritonUnrecoverableError): - LOGGER.error( - f"Unrecoverable error thrown during handling requests={requests_id}. " - "Shutting down Triton Inference Server. " - f"{error_msg}" - ) - self._notify_inference_handler_events_observers(InferenceHandlerEvent.UNRECOVERABLE_ERROR, error_msg) - self.stop() - else: - LOGGER.warning(f"Exception while performing inference on requests={requests_id.hex()}: {error_msg}") - self._requests_responses_connector.send(scope, PyTritonResponseFlags.ERROR | PyTritonResponseFlags.EOS, e) - - LOGGER.debug(f"Finished inference on requests={requests_id.hex()}") - - def _notify_inference_handler_events_observers( - self, - event: InferenceHandlerEvent, - context: typing.Optional[typing.Any], - ): - for inference_handler_events_handler_fn in self._inference_handler_events_observers: - inference_handler_events_handler_fn(self, event, context) - - -def _wait_for_scheduled_tasks(loop, coro_name, timeout_s: float): - def _get_inference_tasks(): - # async generators are separate tasks, and have no names. we should wait for them to finish as well - result = [ - task for task in asyncio.all_tasks(loop) if getattr(task.get_coro(), "__name__", None) in [coro_name, None] - ] - return result - - check_interval_s = 0.1 - while _get_inference_tasks(): - time.sleep(check_interval_s) - timeout_s -= check_interval_s - if timeout_s <= 0: - raise TimeoutError(f"Timeout while waiting for {coro_name} tasks to finish") - - -def _cancel_all_tasks(loop): - """From Python 3.8 asyncio/runners.py.""" - to_cancel = asyncio.all_tasks(loop) - if not to_cancel: - return - - for task in to_cancel: - task.cancel() - - loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) - - for task in to_cancel: - if task.cancelled(): - continue - if task.exception() is not None: - loop.call_exception_handler({ - "message": "unhandled exception during asyncio.run() shutdown", - "exception": task.exception(), - "task": task, - }) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/model.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/model.py deleted file mode 100644 index d815a38050219615248ee6ba10c9225bdf870791..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/model.py +++ /dev/null @@ -1,514 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Model definition for Python Backend for PyTriton. - -This file is automatically copied during deployment on Triton and should not be modified. -""" - -import asyncio -import base64 -import json -import logging -import multiprocessing -import os -import pathlib -import threading -import traceback -import typing -from concurrent.futures import Future as ConcurrentFuture - -import triton_python_backend_utils as pb_utils # type: ignore # pytype: disable=import-error - -from . import communication, data -from .communication import ( # pytype: disable=import-error - HandleResponsesCoro, - HandshakeServer, - PyTritonResponseFlags, - RequestsServer, - get_config_from_handshake_server, -) -from .data import ( # pytype: disable=import-error - PROTOCOL_VERSION, - Base64SerializerDeserializer, - TensorStoreSerializerDeserializer, -) -from .telemetry import TracableModel # pytype: disable=import-error -from .types import Request, Response, ResponsesOrError # pytype: disable=import-error - -LOGGER = logging.getLogger(__name__) - - -def _update_loggers(): - def get_triton_backend_logger(): - try: - # https://github.com/triton-inference-server/python_backend/blob/main/src/pb_stub.cc#L1501 - import triton_python_backend_utils as pb_utils # type: ignore # pytype: disable=import-error - - logger = pb_utils.Logger # pytype: disable=module-attr - logger.error = logger.log_error - logger.warning = logger.log_warn - logger.info = logger.log_info - logger.debug = logger.log_verbose - # do not set log_to_stderr in Backend - except (ImportError, AttributeError): - logger = logging.getLogger("backend") - root_logger = logging.getLogger() - if root_logger.level <= logging.INFO: - multiprocessing.util.log_to_stderr(logging.INFO) - return logger - - logger = get_triton_backend_logger() - global LOGGER - LOGGER = logger - data.LOGGER = logger - communication.LOGGER = logger - communication.SERVER_LOGGER = logger - - -class TritonRequestsServer: - """Class for handling communication between Triton and Inference Callable.""" - - def __init__( - self, - url: str, - responses_handle_fn: HandleResponsesCoro, - serializer_deserializer, - model_config: typing.Dict[str, typing.Any], - ): - """Create TritonRequestsServer object. - - Args: - url: url to the socket - responses_handle_fn: coroutine that handles responses from InferenceHandler - serializer_deserializer: object that serializes and deserializes requests and responses - model_config: Triton model config - """ - self._model_config = model_config - self._model_inputs_names = [model_input["name"] for model_input in model_config["input"]] - self._server = RequestsServer(url, responses_handle_fn) - self._serializer_deserializer = serializer_deserializer - - def run(self): - """Run requests server. - - This method should be called in separate thread. - """ - self._server.run() - - def shutdown(self): - """Shutdown requests server. - - Doesn't wait for server to stop. 
Should wait till thread running TritonRequestsServer is finished. - """ - self._server.shutdown() - - def push(self, requests_id: bytes, triton_requests, spans=None): - """Push requests to TritonRequestsServer queue. - - Args: - requests_id: id of requests - triton_requests: list of Triton requests - spans: list of OpenTelemetry spans - """ - self._server.wait_till_running() # wait until loop is up and running, raise RuntimeError if server is stopping or not launched yet - kwargs = {"requests_id": requests_id, "triton_requests": triton_requests} - if spans is not None: - kwargs["spans"] = spans - return asyncio.run_coroutine_threadsafe(self._send_requests(**kwargs), self._server.server_loop) - - def _wrap_request(self, triton_request, inputs, span=None) -> Request: - request = {} - for input_name in inputs: - input_tensor = pb_utils.get_input_tensor_by_name(triton_request, input_name) - if input_tensor is not None: - request[input_name] = input_tensor.as_numpy() - kwargs = {} - if span is not None: - kwargs["span"] = span - return Request(data=request, parameters=json.loads(triton_request.parameters()), **kwargs) - - async def _send_requests(self, requests_id: bytes, triton_requests, spans=None) -> ConcurrentFuture: - requests = triton_requests - if spans is None: - spans = [None] * len(triton_requests) - requests_with_spans = zip(triton_requests, spans) - - requests = [ - self._wrap_request(triton_request, self._model_inputs_names, span) - for triton_request, span in requests_with_spans - ] - requests_payload = self._serializer_deserializer.serialize_requests(requests) - # will return when socket.send_multipart returns - responses_future = ConcurrentFuture() - await self._server.send_requests(requests_id, requests_payload, responses_future) - return responses_future - - -def _wrap_response(response: Response, requested_outputs_names, model_outputs_dict): - if response.data is not None: - only_requested = {key: value for key, value in response.data.items() if key in requested_outputs_names} - casted = { - key: value.astype(pb_utils.triton_string_to_numpy(model_outputs_dict[key]["data_type"])) - for key, value in only_requested.items() - } - return pb_utils.InferenceResponse( # pytype: disable=module-attr - output_tensors=[ - pb_utils.Tensor(name, value) - for name, value in casted.items() # pytype: disable=module-attr - ] - ) - else: - return None - - -class BatchResponsesHandler: - """Class for handling responses from InferenceHandler.""" - - def __init__(self, requests_map, serializer_deserializer, model_outputs_dict): - """Init BatchResponsesHandler object.""" - self._requests_map = requests_map - self._serializer_deserializer = serializer_deserializer - self._model_outputs_dict = model_outputs_dict - - async def handle_responses( - self, scope: typing.Dict[str, typing.Any], responses_queue: asyncio.Queue, responses_future: ConcurrentFuture - ): - """Handle responses from InferenceHandler. 
- - Args: - scope: scope of the request - responses_queue: queue with responses payload from InferenceHandler - responses_future: future for another thread that will be set with Triton Responses or TritonModelException - - Returns: - Triton Responses or TritonModelException - """ - requests_id: bytes = scope["requests_id"] - triton_requests = self._requests_map[requests_id] - - eos = False - triton_responses_or_error = None - while not eos: - try: - flags, responses_payload = await responses_queue.get() - eos = flags & PyTritonResponseFlags.EOS - error = flags & PyTritonResponseFlags.ERROR - - if error: - assert eos - triton_responses_or_error = pb_utils.TritonModelException( # pytype: disable=module-attr - responses_payload.decode("utf-8") - ) - elif responses_payload: - # inference handler should send all responses in payload - assert triton_responses_or_error is None - responses = self._serializer_deserializer.deserialize_responses(responses_payload) - triton_responses_or_error = [ - _wrap_response(response, request.requested_output_names(), self._model_outputs_dict) - for request, response in zip(triton_requests, responses) - ] - except asyncio.CancelledError: - LOGGER.warning(f"Cancelled responses handler for requests={requests_id.hex()}") - triton_responses_or_error = pb_utils.TritonModelException( # pytype: disable=module-attr - "Cancelled responses handler" - ) - eos = True - finally: - if not error: - self._serializer_deserializer.free_responses_resources(responses_payload) - responses_queue.task_done() - - self._requests_map.pop(requests_id) - responses_future.set_result(triton_responses_or_error) - return triton_responses_or_error - - -class DecoupledResponsesHandler: - """Class for handling responses for decoupled model.""" - - def __init__(self, requests_map, serializer_deserializer, model_outputs_dict): - """Create DecoupledResponsesHandler object.""" - self._requests_map = requests_map - self._serializer_deserializer = serializer_deserializer - self._model_outputs_dict = model_outputs_dict - - async def handle_responses( - self, scope: typing.Dict[str, typing.Any], responses_queue: asyncio.Queue, responses_future: ConcurrentFuture - ) -> typing.Optional[ResponsesOrError]: - """Handle responses from InferenceHandler. 
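# Illustrative sketch (not from the original sources): in decoupled mode nothing
# is returned from execute(); each Triton request exposes a response sender and
# every partial result is pushed through it, with
# TRITONSERVER_RESPONSE_COMPLETE_FINAL marking the last message. The handler
# below does this for whole batches; reduced to a single request, the pattern
# is roughly:
def stream_partial_responses(triton_request, partial_responses):
    sender = triton_request.get_response_sender()
    for response in partial_responses:
        sender.send(response)  # intermediate messages, no flags set
    sender.send(None, pb_utils.TRITONSERVER_RESPONSE_COMPLETE_FINAL)  # close the stream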
- - Args: - scope: scope of the request - responses_queue: queue with responses from InferenceHandler - responses_future: future for another thread that will be set with Triton Responses or TritonModelException - - Returns: - Responses or None if responses were sent to client - """ - requests_id: bytes = scope["requests_id"] - loop = asyncio.get_running_loop() - triton_requests = self._requests_map[requests_id] - triton_senders = [request.get_response_sender() for request in triton_requests] - - eos = False - while not eos: - try: - flags, responses_payload = await responses_queue.get() - - eos = flags & PyTritonResponseFlags.EOS - error = flags & PyTritonResponseFlags.ERROR - - triton_responses = None - if error: - triton_responses = [ - pb_utils.InferenceResponse( # pytype: disable=module-attr - error=pb_utils.TritonError(responses_payload.decode("utf-8")) # pytype: disable=module-attr - ) - for _ in triton_senders - ] - else: - responses = self._serializer_deserializer.deserialize_responses(responses_payload) - triton_responses = [ - _wrap_response(response, request.requested_output_names(), self._model_outputs_dict) - for request, response in zip(triton_requests, responses) - ] - - triton_flags = 0 - if eos: - triton_flags = pb_utils.TRITONSERVER_RESPONSE_COMPLETE_FINAL - triton_responses = triton_responses or [None] * len(triton_senders) - - # run sender.send in parallel in executor - assert len(triton_responses) == len(triton_senders) - send_responses_futures = [ - loop.run_in_executor(None, sender.send, response, triton_flags) - for sender, response in zip(triton_senders, triton_responses) - ] - await asyncio.gather(*send_responses_futures) - except asyncio.CancelledError: - LOGGER.warning(f"Cancelled responses handler for requests={requests_id.hex()}") - triton_flags = pb_utils.TRITONSERVER_RESPONSE_COMPLETE_FINAL - triton_response = pb_utils.InferenceResponse( # pytype: disable=module-attr - error=pb_utils.TritonError(error="Cancelled responses handler") # pytype: disable=module-attr - ) - send_responses_futures = [ - loop.run_in_executor(None, sender.send, triton_response, triton_flags) for sender in triton_senders - ] - await asyncio.gather(*send_responses_futures) - finally: - if not error: - self._serializer_deserializer.free_responses_resources(responses_payload) - responses_queue.task_done() - - self._requests_map.pop(requests_id) - responses_future.set_result(None) - - -class TritonInferenceHandlerConfigGenerator: - """PyTriton Inference handler config generator for Triton PythonBackend.""" - - def __init__(self, data_socket: typing.Union[str, pathlib.Path]): - """Initialize the config generator. 
- - Args: - data_socket: path to the data socket - """ - self._data_socket = pathlib.Path(data_socket) - - def get_config(self) -> typing.Dict[str, typing.Any]: - """Return the config for the inference handler.""" - return { - "protocol_version": PROTOCOL_VERSION, - "data_socket": self._data_socket.as_posix(), - "authkey": base64.encodebytes(multiprocessing.current_process().authkey).decode("ascii"), - } - - -class TritonPythonModel: - """Triton PythonBackend model implementation for proxy.""" - - def __init__(self): - """Dummy inititializer.""" - self._model_config = None - self._model_inputs = None - self._model_outputs = None - self._model_instance_name = None - self._decoupled_model = None - self._serializer_deserializer = None - self._requests_server = None - self._requests_server_thread = None - self._handshake_server = None - self._loop = None - self._frontend = None - self._requests = None - self._id_counter = 0 - self._tracable_model = None - - def initialize(self, args): - """Triton Inference Server Python Backend API called only once when the model is being loaded. - - Allows the model to initialize any state associated with this model. - - Args: - args: Dictionary with both keys and values are strings. The dictionary keys and values are: - * model_config: A JSON string containing the model configuration - * model_instance_kind: A string containing model instance kind - * model_instance_device_id: A string containing model instance device ID - * model_instance_name: A string containing model instance name in form of __ - * model_repository: Model repository path - * model_version: Model version - * model_name: Model name - """ - _update_loggers() # Triton backend logger is available from this point on - - if bool(os.environ.get("PYTRITON_VIZTRACER")): - from viztracer import VizTracer # type: ignore # pytype: disable=import-error - - self._tracer = VizTracer(log_async=True, log_gc=True, tracer_entries=10000000, pid_suffix=True) - self._tracer.register_exit() - self._tracer.start() - - try: - model_name = args["model_name"] - - self._model_config = model_config = json.loads(args["model_config"]) - self._model_inputs = {model_input["name"]: model_input for model_input in model_config["input"]} - self._model_outputs = {model_output["name"]: model_output for model_output in model_config["output"]} - self._model_instance_name = args.get("model_instance_name") - self._decoupled_model = model_config.get("model_transaction_policy", {}).get("decoupled", False) - - workspace_path = pathlib.Path(model_config["parameters"]["workspace-path"]["string_value"]) - - self._tracable_model = TracableModel() - if "trace-config" in model_config["parameters"]: - self._tracable_model.configure_tracing(model_config["parameters"]["trace-config"]["string_value"]) - - LOGGER.debug(f"Model instance name: {self._model_instance_name}") - LOGGER.debug(f"Decoupled model: {self._decoupled_model}") - LOGGER.debug(f"Workspace path: {workspace_path}") - LOGGER.debug(f"Model inputs: {self._model_inputs}") - LOGGER.debug(f"Model outputs: {self._model_outputs}") - - # init serializer/deserializer - data_socket = workspace_path / f"{model_name}-data.sock" - if os.environ.get("PYTRITON_NO_TENSORSTORE"): - self._serializer_deserializer = Base64SerializerDeserializer() - else: - self._serializer_deserializer = TensorStoreSerializerDeserializer() - - handshake_socket = workspace_path / f"{model_name}-config.sock" - model_first_instance_name = "_".join(self._model_instance_name.split("_")[:-1] + ["0"]) - if 
self._model_instance_name == model_first_instance_name: - inference_handler_config = TritonInferenceHandlerConfigGenerator(data_socket).get_config() - self._serializer_deserializer.start(data_socket) - - self._handshake_server = HandshakeServer(handshake_socket, inference_handler_config) - self._handshake_server.start() - - else: - inference_handler_config = get_config_from_handshake_server(handshake_socket) - LOGGER.debug(f"Loaded configuration from {handshake_socket}") - authkey = base64.decodebytes(inference_handler_config["authkey"].encode("ascii")) - self._serializer_deserializer.connect(data_socket, authkey=authkey) - - self._id_counter = 0 - self._requests = {} - - server_socket_path = workspace_path / f"{self._model_instance_name}-server.sock" - handler_class = DecoupledResponsesHandler if self._decoupled_model else BatchResponsesHandler - LOGGER.debug(f"Using {handler_class.__name__} for handling responses") - self._requests_server = TritonRequestsServer( - url=f"ipc://{server_socket_path.as_posix()}", - responses_handle_fn=handler_class( - self._requests, self._serializer_deserializer, self._model_outputs - ).handle_responses, - serializer_deserializer=self._serializer_deserializer, - model_config=self._model_config, - ) - - def _run_server(): - _update_loggers() - self._requests_server.run() - - self._requests_server_thread = threading.Thread(target=_run_server, name="requests-server", daemon=True) - self._requests_server_thread.start() - except Exception: - msg = traceback.format_exc() - raise pb_utils.TritonModelException( - f"Model initialize error: {msg}" - ) from None # pytype: disable=module-attr - - def execute(self, triton_requests): - """Triton Inference Server Python Backend API method. - - Args: - triton_requests: A list of pb_utils.InferenceRequest - - Returns: - A list of pb_utils.InferenceResponse. 
The length of this list is the same as `triton_requests` - - Raises: - pb_utils.TritonModelException: when model execution fails - """ - try: - spans = self._tracable_model.start_requests_spans(triton_requests) - - def _generate_id(): - self._id_counter = (self._id_counter + 1) % 2**32 - return self._id_counter.to_bytes(4, "big") - - requests_id = _generate_id() - while requests_id in self._requests: - requests_id = _generate_id() - self._requests[requests_id] = triton_requests - - # TODO: add this future to container to avoid garbage collection - handle_responses_task_async_future = self._requests_server.push(requests_id, triton_requests, spans) - - if not self._decoupled_model: - handle_responses_concurrent_future = handle_responses_task_async_future.result() - triton_responses_or_error = handle_responses_concurrent_future.result() - - self._tracable_model.end_requests_spans(spans, triton_responses_or_error) - - if triton_responses_or_error is not None and isinstance(triton_responses_or_error, Exception): - raise triton_responses_or_error - else: - triton_responses_or_error = None - - self._tracable_model.end_requests_spans(spans, triton_responses_or_error) - - return triton_responses_or_error - except Exception: - msg = traceback.format_exc() - raise pb_utils.TritonModelException(f"Model execute error: {msg}") from None # pytype: disable=module-attr - - def finalize(self) -> None: - """Finalize the model cleaning the buffers.""" - LOGGER.debug(f"[{self._model_instance_name}] Finalizing backend instance") - LOGGER.debug(f"[{self._model_instance_name}] Closing requests server") - self._requests_server.shutdown() - self._requests_server_thread.join() - - LOGGER.debug(f"[{self._model_instance_name}] Closing requests/responses serializer/deserializer") - self._serializer_deserializer.close() - self._serializer_deserializer = None - - LOGGER.debug(f"[{self._model_instance_name}] Closing handshake server") - if self._handshake_server: - self._handshake_server.close() - self._handshake_server = None - - LOGGER.debug(f"[{self._model_instance_name}] Finalized.") - self._model_instance_name = None diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/telemetry.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/telemetry.py deleted file mode 100644 index 677b6e03e881d5cf7dcd0e5190bf9420a39157aa..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/telemetry.py +++ /dev/null @@ -1,339 +0,0 @@ -# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Telemetry handling module. - -This module contains optional import for Open Telemetry and functions to handle it. 
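For reference, the wrap-around requests_id scheme used by execute above can be sketched standalone (the IdGenerator name is illustrative only):

    class IdGenerator:
        """Illustrative stand-in for the 32-bit wrap-around id used in execute()."""

        def __init__(self):
            self._counter = 0

        def __call__(self) -> bytes:
            # increment modulo 2**32 and render as a 4-byte big-endian key
            self._counter = (self._counter + 1) % 2**32
            return self._counter.to_bytes(4, "big")

    next_id = IdGenerator()
    assert next_id() == (1).to_bytes(4, "big")
    assert len(next_id()) == 4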
-""" - -import base64 -import importlib.util -import json -import logging -from contextlib import contextmanager -from typing import Dict, Generator, List - -# Open Telemetry is not mandatory for PyTriton, but it can be used for tracing -# The import in functions breaks telemetry spans handlign in runtime -try: - import opentelemetry.baggage # pytype: disable=import-error - import opentelemetry.trace # pytype: disable=import-error - import opentelemetry.trace.propagation.tracecontext # pytype: disable=import-error - from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter # pytype: disable=import-error - from opentelemetry.sdk.resources import Resource # pytype: disable=import-error - from opentelemetry.sdk.trace import TracerProvider # pytype: disable=import-error - from opentelemetry.sdk.trace.export import ( # pytype: disable=import-error - BatchSpanProcessor, # pytype: disable=import-error - ) - - # from opentelemetry import trace, context - from opentelemetry.trace import ( # pytype: disable=import-error - NonRecordingSpan, - SpanContext, - Status, - StatusCode, - TraceFlags, - ) - from opentelemetry.trace.propagation.tracecontext import ( # pytype: disable=import-error - TraceContextTextMapPropagator, - ) - -except ImportError: - pass - - -LOGGER = logging.getLogger(__name__) - - -_open_telemetry_tracer = None - - -def set_telemetry_tracer(tracer): - """Set tracer for Open Telemetry. - - Sets global tracer used by proxy at inference callable side of communication. - - See trace_config parameter for TritonConfig to set also tracing for Triton. - Function can only be called onece. Second call raises exception. - - Args: - tracer: Tracer object for Open Telemetry - - Raises: - ValueError for second and all further calls - """ - global _open_telemetry_tracer - if _open_telemetry_tracer is not None: - raise ValueError("Telemetry tracer is already set") - LOGGER.debug(f"Setting telemetry tracer: {tracer}") - _open_telemetry_tracer = tracer - - -def get_telemetry_tracer(): - """Return telemetry tracer set by set_telemetry_tracer.""" - global _open_telemetry_tracer - return _open_telemetry_tracer - - -def get_span_dict(span): - """Serialize Open Telemetry span for sending over proxy bus.""" - headers = {} - with opentelemetry.trace.use_span(span, end_on_exit=False): - ctx = opentelemetry.baggage.set_baggage("zmq", "baggage") - opentelemetry.trace.propagation.tracecontext.TraceContextTextMapPropagator().inject(headers, ctx) - return headers - - -def start_span_from_remote(span_dict: Dict[str, int], name: str): - """Create new Open Telemetry span from remote span deserialized from proxy. - - The span ownership goes to caller, which MUST call spand end to register - event in Open Telemetry server. - - Args: - span_dict: dictionary with fields trace_id and span_id or None - name: name of new span started - - Returns: - Open Telemetry span or None if telemetry is not configured or span_dict is None. - """ - global _open_telemetry_tracer - if _open_telemetry_tracer is not None: - ctx = opentelemetry.trace.propagation.tracecontext.TraceContextTextMapPropagator().extract(span_dict) - return _open_telemetry_tracer.start_span(name, context=ctx) - else: - return None - - -def start_span_from_span(span, name): - """Create new Open Telemetry span from existing span. - - The span ownership goes to caller, which MUST call spand end to register - event in Open Telemetry server. 
- - Args: - span: Open Telemetry span - name: name of new span started - - Returns: - Open Telemetry span - """ - span_context = SpanContext( - trace_id=span.context.trace_id, - span_id=span.context.span_id, - is_remote=True, - trace_flags=TraceFlags(0x01), - ) - ctx = opentelemetry.trace.set_span_in_context(NonRecordingSpan(span_context)) - tracer = get_telemetry_tracer() - return tracer.start_span(name, context=ctx) - - -def parse_trace_config(trace_config_list: List[str]): - """Parse Triton Open Telemetry config. - - The TritonConfig trace_config can be passed here to obtain Open Telemetry resource and - URL to connect to server. - - Example of configuration: - ``` - trace_config=[ - "mode=opentelemetry", - "opentelemetry,url=", - "opentelemetry,resource=service.name=", - "opentelemetry,resource=test.key=test.value", - ] - ``` - Elements: - - List MUST contain mode to indicate opentelemetry support. - - List MUST contain url to allow opening connecion to Open Telemetry server - - List SHOULD contain service.name to improve logging - - List SHOULD contain additional keys like test.key. - - Args: - trace_config_list: list of configuration variable for Tritonconfig - """ - if not any("mode=opentelemetry" in config for config in trace_config_list): - raise ValueError("Only opentelemetry mode is supported") - url_entry = next((config for config in trace_config_list if "opentelemetry,url=" in config), None) - if url_entry is None: - raise ValueError("opentelemetry,url is required") - url = url_entry.split("opentelemetry,url=")[1] - - resource_attributes = {} - for config in trace_config_list: - if config.startswith("opentelemetry,resource="): - resource_str = config.split("opentelemetry,resource=")[1] - resource_parts = resource_str.split(",") - for part in resource_parts: - key, val = part.split("=") - resource_attributes[key] = val - - LOGGER.debug(f"OpenTelemetry URL: {url}") - LOGGER.debug(f"Resource Attributes: {resource_attributes}") - - resource = Resource(attributes=resource_attributes) - return url, resource - - -@contextmanager -def traced_span(request, span_name, **kwargs) -> Generator[None, None, None]: - """Context manager handles opening span for request. - - This context manager opens Open Telemetry span for request. The span is - automatically closed when context manager exits. - - Example of use in inference callable: - ``` - def inference_callable(requests): - responses = [] - for request in requests: - with traced_span(request, "pass-through-get-data"): - # Execute compute for single request - ``` - - Args: - request: Request passed to inference callable - span_name: Name of span to yield - **kwargs: Additional arguments passed to Open Telemetry tracer - """ - global _open_telemetry_tracer - span = request.span - if span is not None: - with opentelemetry.trace.use_span(span, end_on_exit=False, record_exception=False): - with _open_telemetry_tracer.start_as_current_span(span_name, **kwargs): - yield - else: - yield - - -def build_proxy_tracer_from_triton_config(trace_config): - """Build OpenTelemetry tracer from TritonConfig trace_config. 
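A usage sketch for parse_trace_config above (the URL and attribute values are placeholders):

    trace_config = [
        "mode=opentelemetry",
        "opentelemetry,url=http://localhost:4318/v1/traces",
        "opentelemetry,resource=service.name=my-service",
        "opentelemetry,resource=test.key=test.value",
    ]
    url, resource = parse_trace_config(trace_config)
    # url -> "http://localhost:4318/v1/traces"
    # resource attributes -> service.name=my-service, test.key=test.value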
- - Args: - trace_config: list of trace configuration variables - - Returns: - OpenTelemetry tracer - """ - raise_if_no_telemetry() - LOGGER.debug(f"Building OpenTelmetry tracer from config: {trace_config}") - url, resource = parse_trace_config(trace_config) - LOGGER.debug(f"Creating OpenTelemetry tracer with URL: {url}") - opentelemetry.trace.set_tracer_provider( - TracerProvider( - resource=resource, - active_span_processor=BatchSpanProcessor(OTLPSpanExporter(endpoint=url)), - ) - ) - - tracer = opentelemetry.trace.get_tracer(__name__) - return tracer - - -def raise_if_no_telemetry(): - """Raise ImportError if OpenTelemetry is not installed.""" - # Import added to trigger error for missing package - if importlib.util.find_spec("opentelemetry.trace") is None: - pip = "pip install opentelemetry-api opentelemetry-sdk opentelemetry-exporter-otlp" - raise ImportError(f"OpenTelemetry is not installed. Please install it using '{pip}'.") - - -def end_span(span, error=None): - """End Open Telemetry span and set status if error is provided. - - Args: - span: Open Telemetry span - error: error message to set in span status - """ - if span is not None: - if error is not None: - span.set_status(Status(StatusCode.ERROR, error)) - else: - span.set_status(Status(StatusCode.OK)) - span.end() - - -class TracableModel: - """Model class with tracing support. - - This class is base class for model with tracing support. It provides - methods to start and end span for each inference call. - """ - - def __init__(self): - """Initialize TracableModel.""" - self._open_telemetry_tracer = None - - def configure_tracing(self, trace_config): - """Configure tracing for model. - - This method configures OpenTelemetry tracing for model. The trace_config - is list of configuration variables passed by TritonConfig. - - Args: - trace_config: list of trace configuration variables - """ - try: - raise_if_no_telemetry() - - trace_config_json = base64.b64decode(trace_config).decode("utf-8") - trace_config_list = json.loads(trace_config_json) - LOGGER.debug(f"Configuring tracing with {trace_config_list}") - - url, resource = parse_trace_config(trace_config_list) - - opentelemetry.trace.set_tracer_provider(TracerProvider(resource=resource)) - trace_provider = opentelemetry.trace.get_tracer_provider() - self._open_telemetry_tracer = trace_provider.get_tracer("pbe") - trace_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter(endpoint=url))) - - except Exception as e: - raise ValueError(f"Failed to configure tracing: {e}") from e - - def start_requests_spans(self, triton_requests): - """Start spans for requests. - - This method starts spans for each request in triton_requests. - - Args: - triton_requests: list of Triton requests - """ - if self._open_telemetry_tracer is not None: - spans = [] - for triton_request in triton_requests: - context = triton_request.trace().get_context() - if context is None: - context = "{}" - ctx = TraceContextTextMapPropagator().extract(carrier=json.loads(context)) - span = self._open_telemetry_tracer.start_span("python_backend_execute", context=ctx) - spans.append(span) - return spans - return None - - def end_requests_spans(self, spans, triton_responses_or_error): - """End spans for requests. - - This method ends spans for each request in triton_requests. 
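The trace-config model parameter consumed by configure_tracing above is a base64-encoded JSON list; a quick sketch of that encoding (values are placeholders):

    import base64
    import json

    trace_config_list = [
        "mode=opentelemetry",
        "opentelemetry,url=http://localhost:4318/v1/traces",
    ]
    encoded = base64.b64encode(json.dumps(trace_config_list).encode("utf-8")).decode("ascii")
    # configure_tracing() reverses the encoding:
    decoded = json.loads(base64.b64decode(encoded).decode("utf-8"))
    assert decoded == trace_config_list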
- - Args: - spans: list of spans for requests - triton_responses_or_error: list of Triton responses or error - """ - if self._open_telemetry_tracer is not None: - status = Status(StatusCode.OK) - if triton_responses_or_error is not None and isinstance(triton_responses_or_error, Exception): - status = Status(StatusCode.ERROR, str(triton_responses_or_error)) - for span in spans: - span.set_status(status) - span.end() diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/types.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/types.py deleted file mode 100644 index dd24e6039d65c82211fe50a0116f12a6f43ad2f3..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/types.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Common data structures and type used by proxy model and inference handler.""" - -import dataclasses -from typing import Any, Dict, List, Optional, Union - -import numpy as np - -from .telemetry import traced_span - - -@dataclasses.dataclass -class Request: - """Data class for request data including numpy array inputs.""" - - data: Dict[str, np.ndarray] - """Input data for the request.""" - parameters: Optional[Dict[str, Union[str, int, bool]]] = None - """Parameters for the request.""" - span: Optional[Any] = None - """Telemetry span for request""" - - def __getitem__(self, input_name: str) -> np.ndarray: - """Get input data.""" - return self.data[input_name] - - def __setitem__(self, input_name: str, input_data: np.ndarray): - """Set input data.""" - self.data[input_name] = input_data - - def __delitem__(self, input_name: str): - """Delete input data from request.""" - del self.data[input_name] - - def __len__(self): - """Get number of inputs.""" - return len(self.data) - - def __iter__(self): - """Iterate over input names.""" - return iter(self.data) - - def items(self): - """Iterate over input names and data.""" - return self.data.items() - - def keys(self): - """Iterate over input names.""" - return self.data.keys() - - def values(self): - """Iterate over input data.""" - return self.data.values() - - def traced_span(self, span_name): - """Yields Open Telemetry a span for the request. 
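A short sketch of the dict-like access the Request dataclass above provides (using the Request class defined above; input names and array contents are arbitrary):

    import numpy as np

    request = Request(data={"INPUT_1": np.array([[1.0, 2.0]])}, parameters={"priority": 1})
    request["INPUT_2"] = np.array([[3]])
    for name, value in request.items():
        print(name, value.shape)
    del request["INPUT_2"]
    assert len(request) == 1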
- - Args: - span_name (str): Name of the span - """ - return traced_span(self, span_name) - - -Requests = List[Request] - - -@dataclasses.dataclass -class Response: - """Data class for response data including numpy array outputs.""" - - data: Dict[str, np.ndarray] - - def __getitem__(self, output_name: str) -> np.ndarray: - """Get output data.""" - return self.data[output_name] - - def __setitem__(self, output_name: str, output_data: np.ndarray): - """Set output data.""" - self.data[output_name] = output_data - - def __delitem__(self, output_name: str): - """Delete output data from response.""" - del self.data[output_name] - - def __len__(self): - """Get number of outputs.""" - return len(self.data) - - def __iter__(self): - """Iterate over output names.""" - return iter(self.data) - - def items(self): - """Iterate over output names and data.""" - return self.data.items() - - def keys(self): - """Iterate over output names.""" - return self.data.keys() - - def values(self): - """Iterate over output data.""" - return self.data.values() - - -Responses = List[Response] -ResponsesOrError = Union[Responses, Exception] -ResponsesNoneOrError = Union[Responses, None, Exception] - -Scope = Dict[str, Any] diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/validators.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/validators.py deleted file mode 100644 index f96cfd4c9ee77d0d1b772260d6d3687dc5ba97a1..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/proxy/validators.py +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Validators used in proxy module.""" - -import logging - -import numpy as np - -from pytriton.proxy.types import Requests, Responses - -LOGGER = logging.getLogger(__name__) - - -class TritonResultsValidator: - """Validate results returned by inference callable against PyTriton and Triton requirements.""" - - def __init__(self, model_config, strict: bool): - """Validate results returned by inference callable against PyTriton and Triton requirements. - - Args: - model_config: Model configuration on Triton side - strict: Enable/disable strict validation against model config - """ - self._model_config = model_config - self._model_outputs = {output.name: output for output in model_config.outputs} - self._strict = strict - - def validate_responses(self, requests: Requests, responses: Responses): - """Validate responses returned by inference callable against PyTriton and Triton requirements. - - Args: - requests: Requests received from Triton - responses: Responses returned by inference callable - - Raises: - ValueError if responses are incorrect - """ - requests_number = len(requests) - _validate_outputs(self._model_config, self._model_outputs, responses, self._strict, requests_number) - - -def _validate_outputs(model_config, model_outputs, outputs, strict: bool, requests_number: int): - """Validate outputs of model. 
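For orientation, the validator expects exactly one response dict of numpy arrays per request; a minimal sketch of a well-formed result (names and values illustrative):

    import numpy as np

    requests = [{"input": np.array([[1, 2]])}, {"input": np.array([[3, 4]])}]
    # one dict per request, each value a numpy array of a Triton-supported dtype
    responses = [
        {"output": np.array([[0.1, 0.9]], dtype=np.float32)},
        {"output": np.array([[0.7, 0.3]], dtype=np.float32)},
    ]
    assert len(responses) == len(requests)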
- - Args: - model_config: Model configuration on Triton side - model_outputs: Mapped outputs configuration - outputs: Returned outputs from inference callable - strict: Enable/disable strict validation against model config - requests_number: Number of requests - - Raises: - ValueError if outputs are incorrect - """ - if not isinstance(outputs, list): - raise ValueError( - f"Outputs returned by `{model_config.model_name}` model callable " - f"must be list of response dicts with numpy arrays. Got outputs={outputs} instead." - ) - if len(outputs) != requests_number: - raise ValueError( - f"Number of outputs returned by `{model_config.model_name}` inference callable " - f"({len(outputs)}) does not match number of requests ({requests_number}) received from Triton." - ) - - LOGGER.debug(f"Number of responses: {len(outputs)}") - for response_idx, response in enumerate(outputs): - LOGGER.debug(f"Response #{response_idx}") - if not isinstance(response, dict): - raise ValueError( - f"Outputs returned by `{model_config.model_name}` model callable " - f"must be list of response dicts with numpy arrays. Got response={response} instead." - ) - for name, value in response.items(): - LOGGER.debug(f" {name}: {value} shape={value.shape} dtype={value.dtype}") - _validate_output_data(model_config, name, value) - if strict: - _validate_output_dtype_and_shape(model_config, model_outputs, name, value) - - -def _validate_output_data(model_config, name, value): - """Validate output with given name and value. - - Args: - model_config: Model configuration on Triton side - name: Name of output - value: Value returned in output - - Raises: - ValueError if output is incorrect - """ - if not isinstance(name, str): - raise ValueError(f"Not all keys returned by `{model_config.model_name}` model callable are string") - if not isinstance(value, np.ndarray): - raise ValueError(f"Not all values returned by `{model_config.model_name}` model callable are numpy arrays") - else: - allowed_kind = "biufOSU" - if value.dtype.kind not in allowed_kind: - raise ValueError( - f"Only bool, numeric, string, unicode and object arrays " - f"are supported by Triton (dtype.kind: {allowed_kind}). " - f"Returned `{name}` for model `{model_config.model_name}` " - f"has `{value.dtype.kind}` dtype.kind." - ) - if value.dtype.kind == "O": - if isinstance(value.item(0), str): - raise ValueError( - "Use string/byte-string instead of object for passing " - f"string in NumPy array from model `{model_config.model_name}`." - ) - elif not isinstance(value.item(0), bytes): - raise ValueError( - "Only bytes as objects dtype are supported by PyTriton. " - f"Returned `{name}` from `{model_config.model_name}` " - f"has `{type(value.item(0))}` type." - ) - - -def _validate_output_dtype_and_shape(model_config, model_outputs, name, value): - """Validate output with given name and value against the model config. - - Args: - model_config: Model configuration on Triton side - model_outputs: Mapped outputs defined in model config - name: Name of output - value: Value returned in output - - Raises: - ValueError if output does not match defined values in model config - """ - output_config = model_outputs.get(name) - if not output_config: - raise ValueError( - f"Returned output `{name}` is not defined in model config for model `{model_config.model_name}`." 
- ) - - allowed_object_types = [bytes, object, np.bytes_, np.object_] - if (value.dtype.kind not in "OSU" and not np.issubdtype(value.dtype, output_config.dtype)) or ( - value.dtype.kind in "OSU" and output_config.dtype not in allowed_object_types - ): - raise ValueError( - f"Returned output `{name}` for model `{model_config.model_name}` has invalid type. " - f"Returned: {value.dtype} ({value.dtype.kind}). Expected: {output_config.dtype}." - ) - - batch_shape = 1 if model_config.batching else 0 - if len(value.shape[batch_shape:]) != len(output_config.shape): - raise ValueError( - f"Returned output `{name}` for model `{model_config.model_name}` has invalid shapes. " - f"Returned: {value.shape[batch_shape:]}. Expected: {output_config.shape}." - ) - if any(x != y != -1 for x, y in zip(value.shape[batch_shape:], output_config.shape)): - raise ValueError( - f"Returned output `{name}` for model `{model_config.model_name}` " - "has invalid shapes at one or more positions. " - f"Returned: {value.shape[batch_shape:]}. Expected: {output_config.shape}." - ) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/__init__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/__init__.py deleted file mode 100644 index 935a79d63fab2a26564aa03472f3af1e62b0a954..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2020-2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/python_backend_config.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/python_backend_config.py deleted file mode 100644 index 5da062b96b701cd4654158a84233260f2da2d90f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/python_backend_config.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Python Backend configuration class. - -Use to configure the CLI argument for Python Backend passed on Triton Inference Server process start. 
- - Examples of use: - - config = PythonBackendConfig() - config["shm-default-byte-size"] = 33554432 - config.to_list_args() # ["python,shm-default-byte-size=33554432"] -""" - -from typing import Any, Dict, List, Optional, Union - -from pytriton.exceptions import PyTritonError - - -class PythonBackendConfig: - """A config class to set arguments to the Triton Inference Server. - - An argument set to None will use the server default. - """ - - backend_arg_keys = [ - "shm-region-prefix-name", - "shm-default-byte-size", - "shm-growth-byte-size", - ] - - def __init__(self): - """Construct PythonBackendConfig.""" - self._backend_args = {} - - @classmethod - def allowed_keys(cls): - """Return the list of available server arguments with snake cased options. - - Returns: - List of str. The keys that can be used to configure Python Backend instance - """ - snake_cased_keys = [key.replace("-", "_") for key in cls.backend_arg_keys] - return cls.backend_arg_keys + snake_cased_keys - - @classmethod - def backend_keys(cls): - """Return the list of available server arguments with snake cased options. - - Returns: - List of str. The keys that can be used to configure Python Backend instance - """ - snake_cased_keys = [key.replace("-", "_") for key in cls.backend_arg_keys] - return cls.backend_arg_keys + snake_cased_keys - - def update_config(self, params: Optional[Dict] = None) -> None: - """Allows setting values from a params dict. - - Args: - params: The keys are allowed args to perf_analyzer - """ - if params: - for key in params: - self[key.strip().replace("_", "-")] = params[key] - - def to_list_args(self) -> List[str]: - """Utility function to convert a config into a list of arguments to the server with CLI. - - Returns: - The command consisting of all set arguments to the Python Backend. - e.g. ['python,shm-default-byte-size=33554432'] - """ - cli_items = [] - for key, val in self._backend_args.items(): - if val is None: - continue - cli_items.append(f"python,{key}={val}") - - return cli_items - - def copy(self) -> "PythonBackendConfig": - """Create copy of config. - - Returns: - PythonBackendConfig object that has the same args as this one - """ - config_copy = PythonBackendConfig() - config_copy.update_config(params=self._backend_args) - return config_copy - - def backend_args(self) -> Dict: - """Return the dict with defined server arguments. - - Returns: - Dict where keys are server arguments values are their values - """ - return self._backend_args - - def __getitem__(self, key: str) -> Any: - """Gets an arguments value in config. - - Args: - key: The name of the argument to the Python Backend - - Returns: - The value that the argument is set to in this config - """ - kebab_cased_key = key.strip().replace("_", "-") - return self._backend_args.get(kebab_cased_key, None) - - def __setitem__(self, key: str, value: Union[str, int]) -> None: - """Sets an arguments value in config after checking if defined/supported. 
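A sketch of how snake_case keys map onto the kebab-case CLI form in update_config and to_list_args above (byte sizes are illustrative):

    config = PythonBackendConfig()
    config.update_config({"shm_default_byte_size": 4194304, "shm_growth_byte_size": 1048576})
    assert config["shm-default-byte-size"] == 4194304  # kebab-case lookup
    assert config["shm_growth_byte_size"] == 1048576   # snake_case lookup works too
    print(config.to_list_args())
    # ['python,shm-default-byte-size=4194304', 'python,shm-growth-byte-size=1048576']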
- - Args: - key: The name of the argument to the Python Backend - value: The value to which the argument is being set - - Raises: - PyTritonError: if key is unsupported or undefined in the config class - """ - assert isinstance(value, int) or isinstance(value, str) - - kebab_cased_key = key.strip().replace("_", "-") - if kebab_cased_key in self.backend_arg_keys: - self._backend_args[kebab_cased_key] = value - else: - raise PyTritonError(f"The argument {key!r} to the Python Backend is not supported by the pytriton.") - - def __contains__(self, key: str) -> bool: - """Checks if an argument is defined in the PythonBackendConfig. - - Args: - key: The name of the attribute to check for definition in PythonBackendConfig - - Returns: - True if the argument is defined in the config, False otherwise - """ - kebab_cased_key = key.strip().replace("_", "-") - value = self._backend_args.get(kebab_cased_key, None) - return value is not None diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/triton_server.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/triton_server.py deleted file mode 100644 index b558690c53a93e026f3a4a096be38ba916814eb5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/triton_server.py +++ /dev/null @@ -1,280 +0,0 @@ -# Copyright (c) 2020-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Triton Inference Server class. - -Use to start and maintain the Triton Inference Server process. - - Examples of use: - - server = TritonServer( - path="/path/to/tritonserver/binary", - libs_path="/path/to/tritonserver/libraries", - config=TritonServerConfig() - ) - server.start() - -""" - -import ctypes.util -import importlib -import json -import logging -import os -import pathlib -import signal -import sys -import threading -import traceback -from typing import Callable, Dict, Literal, Optional, Sequence, Union - -from pytriton.utils.logging import silence_3rd_party_loggers - -from ..utils import endpoint_utils -from .triton_server_config import TritonServerConfig - -LOGGER = logging.getLogger(__name__) -SERVER_OUTPUT_TIMEOUT_SECS = 30 -_PROXY_REQUIRED_MODULES = ["numpy", "zmq"] -_PYTRITON_STARTED_IN_PY310 = (3, 10) <= sys.version_info < (3, 11) - -silence_3rd_party_loggers() - - -def get_triton_python_backend_python_env() -> pathlib.Path: - """Get the path to the python environment for the triton python backend. - - Officially built python backend is built with python 3.8 so need to - use the same python version to run the python backend. - - Also, python environment should contain packages required by the proxy. 
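The module check performed below boils down to probing for the proxy's required packages; a condensed approximation (the actual implementation imports the modules rather than using find_spec):

    import importlib.util

    missing = [name for name in ("numpy", "zmq") if importlib.util.find_spec(name) is None]
    if missing:
        raise RuntimeError(f"Python backend environment is missing: {', '.join(missing)}")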
- - Returns: - Path to the python environment with python 3.8 - """ - env_path = pathlib.Path(sys.exec_prefix) - installed_modules = [] - missing_modules = [] - for module_name in _PROXY_REQUIRED_MODULES: - try: - importlib.import_module(module_name) - installed_modules.append(module_name) - except ImportError: - missing_modules.append(module_name) - - if missing_modules: - raise RuntimeError( - "Python environment for python backend is missing required packages. " - f"Ensure that you have {', '.join(_PROXY_REQUIRED_MODULES)} installed in the {env_path} environment. " - f"Installed modules {', '.join(installed_modules)}. Missing modules {', '.join(missing_modules)}." - ) - - return env_path - - -class TritonServer: - """Implementation of TritonServer interface that runs tritonserver locally as subprocess.""" - - def __init__( - self, - *, - path: Union[str, pathlib.Path], - libs_path: Union[str, pathlib.Path], - config: TritonServerConfig, - gpus: Optional[Sequence[int]] = None, - verbose: bool = True, - ): - """Triton server constructor. - - Args: - path: The absolute path to the tritonserver executable - libs_path: The absolute path to the tritonserver libraries - config: The config object containing arguments for this server instance - gpus: sequence of GPUs device ids to attach to process - verbose: Enable verbose logging of server to STDOUT - """ - self._server_path = pathlib.Path(path) - self._server_libs_path = pathlib.Path(libs_path) - self._server_config = config - self._gpus = gpus - self._tritonserver_running_cmd = None - self._tritonserver_logs = "" - self._verbose = verbose - self._on_exit_lock = threading.RLock() - self._on_exit = [] - - assert self._server_config["model-repository"], "Triton Server requires --model-repository argument to be set." - - def start(self) -> None: - """Starts the tritonserver process. - - The method can be executed multiple times and only single process is started. - """ - if self.is_alive(): - raise RuntimeError( - f"You have to stop previously started tritonserver process first " - f"pid={self._tritonserver_running_cmd.pid}" - ) - else: - env = self._get_env() - - LOGGER.debug(f"Triton Server binary {self._server_path}. 
Environment:\n{json.dumps(env, indent=4)}") - tritonserver_cmd, *rest = self._server_path.as_posix().split(" ", 1) - - import sh - - tritonserver_cmd = sh.Command(tritonserver_cmd) - tritonserver_cmd = tritonserver_cmd.bake(*rest) - - tritonserver_args = self._server_config.to_args_list() - - def _preexec_fn(): - PR_SET_PDEATHSIG = 1 # noqa - libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True) - libc.prctl(PR_SET_PDEATHSIG, signal.SIGTERM) - - self._tritonserver_logs = "" - self._tritonserver_running_cmd = tritonserver_cmd( - *tritonserver_args, - _env=env, - _err_to_out=True, - _out=self._record_logs, - _out_bufsize=0, - _err_bufsize=0, - _bg=True, - _bg_exc=False, - _done=self._handle_exit, - _preexec_fn=_preexec_fn, - ) - - def stop(self) -> None: - """Send the SIGINT signal to running process and wait until server finished.""" - if self.is_alive(): - LOGGER.debug( - f"Stopping Triton Inference server - sending SIGINT signal and wait {SERVER_OUTPUT_TIMEOUT_SECS}s" - ) - self._tritonserver_running_cmd.process.signal(signal.SIGINT) - try: - LOGGER.debug("Waiting for process to stop.") - self._tritonserver_running_cmd.wait(timeout=SERVER_OUTPUT_TIMEOUT_SECS) - except Exception: - message = traceback.format_exc() - LOGGER.debug(f"Error message: \n{message}") - try: - if self.is_alive(): - LOGGER.debug("Timeout waiting for server. Trying to kill process.") - self._tritonserver_running_cmd.process.kill() - self._tritonserver_running_cmd.wait(timeout=SERVER_OUTPUT_TIMEOUT_SECS) - except Exception: - LOGGER.debug(f"Could not kill triton server pid={self._tritonserver_running_cmd.pid}") - message = traceback.format_exc() - LOGGER.debug(f"Error message: \n{message}") - - def register_on_exit(self, callback: Callable) -> None: - """Register callback executed on process exit. - - Args: - callback: callable to register in callbacks - """ - with self._on_exit_lock: - self._on_exit.append(callback) - - def unregister_on_exit(self, callback: Callable) -> None: - """Unregister callback executed on process exit. - - Args: - callback: callable to unregister from callbacks - """ - with self._on_exit_lock: - self._on_exit.remove(callback) - - def is_alive(self) -> bool: - """Verify if server is currently running. - - Returns: - True when server is running, False otherwise - """ - return self._tritonserver_running_cmd is not None and self._tritonserver_running_cmd.is_alive() - - def logs(self) -> str: - """Return the server logs of running server. - - Returns: - String with capture logs - """ - return self._tritonserver_logs - - def get_endpoint(self, endpoint: Literal["http", "grpc", "metrics"]) -> str: - """Get endpoint url. - - Args: - endpoint: endpoint name - - Returns: - endpoint url in form of {protocol}://{host}:{port} - """ - return endpoint_utils.get_endpoint(self._server_config, endpoint) - - def _record_logs(self, line: Union[bytes, str]) -> None: - """Record logs obtained from server process. If verbose logging enabled, print the log into STDOUT. - - Args: - line: Log line obtained from server - """ - if isinstance(line, bytes): - line = line.decode("utf-8", errors="replace") - - if self._verbose: - print(line, end="") # noqa: T201 - - self._tritonserver_logs += line - - def _get_env(self) -> Dict: - """Create and return environment variables for server execution. 
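A usage sketch for the exit callbacks above; `server` is assumed to be an already constructed TritonServer instance:

    def _on_server_exit(success: bool, exit_code: int) -> None:
        # matches the (success, exit_code) signature _handle_exit() invokes callbacks with
        print(f"tritonserver exited, success={success}, exit_code={exit_code}")

    server.register_on_exit(_on_server_exit)
    # ... later, once the callback is no longer needed:
    server.unregister_on_exit(_on_server_exit)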
- - Returns: - Dict with environment variables - """ - env = os.environ.copy() - if self._gpus and isinstance(self._gpus, (list, tuple)): - env["CUDA_VISIBLE_DEVICES"] = ",".join([str(gpu) for gpu in self._gpus]) - - if "LD_LIBRARY_PATH" in env: - env["LD_LIBRARY_PATH"] += ":" + self._server_libs_path.as_posix() - else: - env["LD_LIBRARY_PATH"] = self._server_libs_path.as_posix() - - env_path = get_triton_python_backend_python_env() - python_bin_directory = env_path / "bin" - env["PATH"] = f"{python_bin_directory.as_posix()}:{env['PATH']}" - - return env - - def _handle_exit(self, _, success, exit_code) -> None: - """Handle exit of server process. Trigger callbacks if provided. - - Args: - success: Flag indicating if process succeeded or failed - exit_code: Exit code with which server process finished - """ - if not success: - LOGGER.warning("Triton Inference Server exited with failure. Please wait.") - LOGGER.debug(f"Triton Inference Server exit code {exit_code}") - else: - LOGGER.debug("Triton Inference Server stopped") - with self._on_exit_lock: - for callback in self._on_exit: - try: - callback(success, exit_code) - except Exception as e: - LOGGER.debug(f"Error during calling on_exit callback; {e}") diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/triton_server_config.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/triton_server_config.py deleted file mode 100644 index ff2e86df2d4d9fe8319cbdb2631ea6f28e37581c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/server/triton_server_config.py +++ /dev/null @@ -1,244 +0,0 @@ -# Copyright (c) 2020-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Triton Inference Server configuration class. - -Use to configure the CLI argument for starting the Triton Inference Server process. - - Examples of use: - - config = TritonServerConfig() - config["log-verbose"] = 1 - config.to_cli_string() -""" - -from typing import Any, Dict, List, Optional - -from pytriton.exceptions import PyTritonError - - -class TritonServerConfig: - """A config class to set arguments to the Triton Inference Server. - - An argument set to None will use the server default. 
- """ - - # https://github.com/triton-inference-server/server/blob/main/src/command_line_parser.cc - server_arg_keys = [ - # Server - "id", - # Logging - "log-verbose", - "log-info", - "log-warning", - "log-error", - "log-format", - "log-file", - # Model Repository - "model-store", - "model-repository", - # Exit - "exit-timeout-secs", - "exit-on-error", - # Strictness - "disable-auto-complete-config", - "strict-model-config", - "strict-readiness", - # http options - "allow-http", - "http-address", - "http-port", - "reuse-http-port", - "http-header-forward-pattern", - "http-thread-count", - # grpc options - "allow-grpc", - "grpc-address", - "grpc-port", - "reuse-grpc-port", - "grpc-header-forward-pattern", - "grpc-infer-allocation-pool-size", - "grpc-use-ssl", - "grpc-use-ssl-mutual", - "grpc-server-cert", - "grpc-server-key", - "grpc-root-cert", - "grpc-infer-response-compression-level", - "grpc-keepalive-time", - "grpc-keepalive-timeout", - "grpc-keepalive-permit-without-calls", - "grpc-http2-max-pings-without-data", - "grpc-http2-min-recv-ping-interval-without-data", - "grpc-http2-max-ping-strikes", - "grpc-restricted-protocol", - # metrics options - "allow-metrics", - "allow-gpu-metrics", - "allow-cpu-metrics", - "metrics-interval-ms", - "metrics-port", - "metrics-address", - # Model control - "model-control-mode", - "repository-poll-secs", - "load-model", - # Memory and GPU - "pinned-memory-pool-byte-size", - "cuda-memory-pool-byte-size", - "min-supported-compute-capability", - "buffer-manager-thread-count", - # Backend config - "backend-directory", - "backend-config", - "allow-soft-placement", - "gpu-memory-fraction", - "tensorflow-version", - # SageMaker integration - "allow-sagemaker", - "sagemaker-port", - "sagemaker-safe-port-range", - "sagemaker-thread-count", - # VertexAI integration - "allow-vertex-ai", - "vertex-ai-port", - "vertex-ai-thread-count", - "vertex-ai-default-model", - "metrics-config", - "trace-config", - "cache-config", - "cache-directory", - ] - - def __init__(self): - """Construct TritonServerConfig.""" - self._server_args = {} - - @classmethod - def allowed_keys(cls): - """Return the list of available server arguments with snake cased options. - - Returns: - List of str. The keys that can be used to configure tritonserver instance - """ - snake_cased_keys = [key.replace("-", "_") for key in cls.server_arg_keys] - return cls.server_arg_keys + snake_cased_keys - - def update_config(self, params: Optional[Dict] = None) -> None: - """Allows setting values from a params dict. - - Args: - params: The keys are allowed args to perf_analyzer - """ - if params: - for key in params: - self[key.strip().replace("_", "-")] = params[key] - - def to_cli_string(self) -> str: - """Utility function to convert a config into a string of arguments to the server with CLI. - - Returns: - The command consisting of all set arguments to the tritonserver. - e.g. '--model-repository=/models --log-verbose=True' - """ - cli_items = [] - for key, val in self._server_args.items(): - if val is None: - continue - if isinstance(val, (tuple, list)): - for sub_val in val: - cli_items.append(f"--{key}={sub_val}") - else: - cli_items.append(f"--{key}={val}") - return " ".join(cli_items) - - def to_args_list(self) -> List: - """Utility function to convert a cli string into a list of arguments. - - The function is taking into account "smart" delimiters. Notice in the example below that only the first equals - sign is used as split delimiter. 
- - Returns: - The list of arguments consisting of all set arguments to the tritonserver. - - Example: - input cli_string: "--model-control-mode=explicit - --backend-config=tensorflow,version=2" - output: ['--model-control-mode', 'explicit', - '--backend-config', 'tensorflow,version=2'] - """ - args_list = [] - args = self.to_cli_string().split() - for arg in args: - args_list += arg.split("=", 1) - return args_list - - def copy(self) -> "TritonServerConfig": - """Create copy of config. - - Returns: - TritonServerConfig object that has the same args as this one - """ - config_copy = TritonServerConfig() - config_copy.update_config(params=self._server_args) - return config_copy - - def server_args(self) -> Dict: - """Return the dict with defined server arguments. - - Returns: - Dict where keys are server arguments values are their values - """ - return self._server_args - - def __getitem__(self, key: str) -> Any: - """Gets an arguments value in config. - - Args: - key: The name of the argument to the tritonserver - - Returns: - The value that the argument is set to in this config - """ - kebab_cased_key = key.strip().replace("_", "-") - return self._server_args.get(kebab_cased_key, None) - - def __setitem__(self, key: str, value: Any) -> None: - """Sets an arguments value in config after checking if defined/supported. - - Args: - key: The name of the argument to the tritonserver - value: The value to which the argument is being set - - Raises: - PyTritonError: if key is unsupported or undefined in the config class - """ - kebab_cased_key = key.strip().replace("_", "-") - if kebab_cased_key in self.server_arg_keys: - self._server_args[kebab_cased_key] = value - else: - raise PyTritonError( - f"The argument {key!r} to the Triton Inference " "Server is not supported by the pytriton." - ) - - def __contains__(self, key: str) -> bool: - """Checks if an argument is defined in the TritonServerConfig. - - Args: - key: The name of the attribute to check for definition in TritonServerConfig - - Returns: - True if the argument is defined in the config, False otherwise - """ - kebab_cased_key = key.strip().replace("_", "-") - value = self._server_args.get(kebab_cased_key, None) - return value is not None diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/triton.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/triton.py deleted file mode 100644 index 2572d4baf0abe50d62e8d1fdd237d36421dec5d7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/triton.py +++ /dev/null @@ -1,874 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Triton Inference Server class. - -The class provide functionality to run Triton Inference Server, load the Python models and serve the requests/response -for models inference. 
- - Examples of use: - with Triton() as triton: - triton.bind( - model_name="BERT", - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.bytes_, shape=(1,)), - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=PythonModelConfig(max_batch_size=16), - ) - triton.serve() -""" - -import atexit -import codecs -import contextlib -import dataclasses -import logging -import os -import pathlib -import re -import shutil -import sys -import threading -import threading as th -import typing -from typing import Any, Callable, Dict, List, Optional, Sequence, Union - -import typing_inspect - -from pytriton.client import ModelClient -from pytriton.client.utils import TritonUrl, create_client_from_url, wait_for_server_ready -from pytriton.constants import DEFAULT_TRITON_STARTUP_TIMEOUT_S -from pytriton.decorators import TritonContext -from pytriton.exceptions import PyTritonValidationError -from pytriton.model_config.tensor import Tensor -from pytriton.models.manager import ModelManager -from pytriton.models.model import Model, ModelConfig, ModelEvent -from pytriton.proxy.telemetry import build_proxy_tracer_from_triton_config, get_telemetry_tracer, set_telemetry_tracer -from pytriton.server.python_backend_config import PythonBackendConfig -from pytriton.server.triton_server import TritonServer -from pytriton.server.triton_server_config import TritonServerConfig -from pytriton.utils import endpoint_utils -from pytriton.utils.dataclasses import kwonly_dataclass -from pytriton.utils.distribution import get_libs_path, get_root_module_path, get_stub_path -from pytriton.utils.workspace import Workspace - -LOGGER = logging.getLogger(__name__) - -TRITONSERVER_DIST_DIR = get_root_module_path() / "tritonserver" -MONITORING_PERIOD_S = 10.0 -WAIT_FORM_MODEL_TIMEOUT_S = 60.0 -INITIAL_BACKEND_SHM_SIZE = 4194304 # 4MB, Python Backend default is 64MB, but is automatically increased -GROWTH_BACKEND_SHM_SIZE = 1048576 # 1MB, Python Backend default is 64MB - -MODEL_URL = "/v2/models/{model_name}" -MODEL_READY_URL = f"{MODEL_URL}/ready/" -MODEL_CONFIG_URL = f"{MODEL_URL}/config/" -MODEL_INFER_URL = f"{MODEL_URL}/infer/" - - -# see https://github.com/triton-inference-server/server/blob/main/src/command_line_parser.cc for more details -@kwonly_dataclass -@dataclasses.dataclass -class TritonConfig: - """Triton Inference Server configuration class for customization of server execution. - - The arguments are optional. If value is not provided the defaults for Triton Inference Server are used. - Please, refer to https://github.com/triton-inference-server/server/ for more details. - - Args: - id: Identifier for this server. - log_verbose: Set verbose logging level. Zero (0) disables verbose logging and - values >= 1 enable verbose logging. - log_file: Set the name of the log output file. - exit_timeout_secs: Timeout (in seconds) when exiting to wait for in-flight inferences to finish. - exit_on_error: Exit the inference server if an error occurs during initialization. - strict_readiness: If true /v2/health/ready endpoint indicates ready if the server is - responsive and all models are available. - allow_http: Allow the server to listen for HTTP requests. - http_address: The address for the http server to bind to. Default is 0.0.0.0. - http_port: The port for the server to listen on for HTTP requests. Default is 8000. - http_header_forward_pattern: The regular expression pattern - that will be used for forwarding HTTP headers as inference request parameters. 
- http_thread_count: Number of threads handling HTTP requests. - allow_grpc: Allow the server to listen for GRPC requests. - grpc_address: The address for the grpc server to binds to. Default is 0.0.0.0. - grpc_port: The port for the server to listen on for GRPC requests. Default is 8001. - grpc_header_forward_pattern: The regular expression pattern that will be used - for forwarding GRPC headers as inference request parameters. - grpc_infer_allocation_pool_size: The maximum number of inference request/response objects - that remain allocated for reuse. As long as the number of in-flight requests doesn't exceed - this value there will be no allocation/deallocation of request/response objects. - grpc_use_ssl: Use SSL authentication for GRPC requests. Default is false. - grpc_use_ssl_mutual: Use mututal SSL authentication for GRPC requests. - This option will preempt grpc_use_ssl if it is also specified. Default is false. - grpc_server_cert: File holding PEM-encoded server certificate. Ignored unless grpc_use_ssl is true. - grpc_server_key: Path to file holding PEM-encoded server key. Ignored unless grpc_use_ssl is true. - grpc_root_cert: Path to file holding PEM-encoded root certificate. Ignored unless grpc_use_ssl is true. - grpc_infer_response_compression_level: The compression level to be used while returning the inference - response to the peer. Allowed values are none, low, medium and high. Default is none. - grpc_keepalive_time: The period (in milliseconds) after which a keepalive ping is sent on the transport. - grpc_keepalive_timeout: The period (in milliseconds) the sender of the keepalive ping waits - for an acknowledgement. - grpc_keepalive_permit_without_calls: Allows keepalive pings to be sent even if there are no calls in flight - grpc_http2_max_pings_without_data: The maximum number of pings that can be sent when there is no - data/header frame to be sent. - grpc_http2_min_recv_ping_interval_without_data: If there are no data/header frames being sent on the - transport, this channel argument on the server side controls the minimum time (in milliseconds) that - gRPC Core would expect between receiving successive pings. - grpc_http2_max_ping_strikes: Maximum number of bad pings that the server will tolerate before sending - an HTTP2 GOAWAY frame and closing the transport. - grpc_restricted_protocol: Specify restricted GRPC protocol setting. - The format of this flag is `,=`. - Where `` is a comma-separated list of protocols to be restricted. - `` will be additional header key to be checked when a GRPC request - is received, and `` is the value expected to be matched. - allow_metrics: Allow the server to provide prometheus metrics. - allow_gpu_metrics: Allow the server to provide GPU metrics. - allow_cpu_metrics: Allow the server to provide CPU metrics. - metrics_interval_ms: Metrics will be collected once every `` milliseconds. - metrics_port: The port reporting prometheus metrics. - metrics_address: The address for the metrics server to bind to. Default is the same as http_address. - allow_sagemaker: Allow the server to listen for Sagemaker requests. - sagemaker_port: The port for the server to listen on for Sagemaker requests. - sagemaker_safe_port_range: Set the allowed port range for endpoints other than the SageMaker endpoints. - sagemaker_thread_count: Number of threads handling Sagemaker requests. - allow_vertex_ai: Allow the server to listen for Vertex AI requests. - vertex_ai_port: The port for the server to listen on for Vertex AI requests. 
- vertex_ai_thread_count: Number of threads handling Vertex AI requests. - vertex_ai_default_model: The name of the model to use for single-model inference requests. - metrics_config: Specify a metrics-specific configuration setting. - The format of this flag is `=`. It can be specified multiple times - trace_config: Specify global or trace mode specific configuration setting. - The format of this flag is `,=`. - Where `` is either 'triton' or 'opentelemetry'. The default is 'triton'. - To specify global trace settings (level, rate, count, or mode), the format would be `=`. - For 'triton' mode, the server will use Triton's Trace APIs. - For 'opentelemetry' mode, the server will use OpenTelemetry's APIs to generate, - collect and export traces for individual inference requests. - More details, including supported settings can be found at [Triton trace guide](https://github.com/triton-inference-server/server/blob/main/docs/user_guide/trace.md). - cache_config: Specify a cache-specific configuration setting. - The format of this flag is `,=`. - Where `` is the name of the cache, such as 'local' or 'redis'. - Example: `local,size=1048576` will configure a 'local' cache implementation - with a fixed buffer pool of size 1048576 bytes. - cache_directory: The global directory searched for cache shared libraries. Default is '/opt/tritonserver/caches'. - This directory is expected to contain a cache implementation as a shared library with the name 'libtritoncache.so'. - buffer_manager_thread_count: The number of threads used to accelerate copies and other operations - required to manage input and output tensor contents. - """ - - model_repository: Optional[pathlib.Path] = None - id: Optional[str] = None - log_verbose: Optional[int] = None - log_file: Optional[pathlib.Path] = None - exit_timeout_secs: Optional[int] = None - exit_on_error: Optional[bool] = None - strict_readiness: Optional[bool] = None - allow_http: Optional[bool] = None - http_address: Optional[str] = None - http_port: Optional[int] = None - http_header_forward_pattern: Optional[str] = None - http_thread_count: Optional[int] = None - allow_grpc: Optional[bool] = None - grpc_address: Optional[str] = None - grpc_port: Optional[int] = None - grpc_header_forward_pattern: Optional[str] = None - grpc_infer_allocation_pool_size: Optional[int] = None - grpc_use_ssl: Optional[bool] = None - grpc_use_ssl_mutual: Optional[bool] = None - grpc_server_cert: Optional[pathlib.Path] = None - grpc_server_key: Optional[pathlib.Path] = None - grpc_root_cert: Optional[pathlib.Path] = None - grpc_infer_response_compression_level: Optional[str] = None - grpc_keepalive_time: Optional[int] = None - grpc_keepalive_timeout: Optional[int] = None - grpc_keepalive_permit_without_calls: Optional[bool] = None - grpc_http2_max_pings_without_data: Optional[int] = None - grpc_http2_min_recv_ping_interval_without_data: Optional[int] = None - grpc_http2_max_ping_strikes: Optional[int] = None - allow_metrics: Optional[bool] = None - allow_gpu_metrics: Optional[bool] = None - allow_cpu_metrics: Optional[bool] = None - metrics_interval_ms: Optional[int] = None - metrics_port: Optional[int] = None - metrics_address: Optional[str] = None - allow_sagemaker: Optional[bool] = None - sagemaker_port: Optional[int] = None - sagemaker_safe_port_range: Optional[str] = None - sagemaker_thread_count: Optional[int] = None - allow_vertex_ai: Optional[bool] = None - vertex_ai_port: Optional[int] = None - vertex_ai_thread_count: Optional[int] = None - vertex_ai_default_model: 
Optional[str] = None - metrics_config: Optional[List[str]] = None - trace_config: Optional[List[str]] = None - cache_config: Optional[List[str]] = None - cache_directory: Optional[str] = None - buffer_manager_thread_count: Optional[int] = None - - def __post_init__(self): - """Validate configuration for early error handling.""" - if self.allow_http not in [True, None] and self.allow_grpc not in [True, None]: - raise PyTritonValidationError("The `http` or `grpc` endpoint has to be allowed.") - - def to_dict(self): - """Map config object to dictionary.""" - return dataclasses.asdict(self) - - @classmethod - def from_dict(cls, config: Dict[str, Any]) -> "TritonConfig": - """Creates a ``TritonConfig`` instance from an input dictionary. Values are converted into correct types. - - Args: - config: a dictionary with all required fields - - Returns: - a ``TritonConfig`` instance - """ - fields: Dict[str, dataclasses.Field] = {field.name: field for field in dataclasses.fields(cls)} - unknown_config_parameters = {name: value for name, value in config.items() if name not in fields} - for name, value in unknown_config_parameters.items(): - LOGGER.warning( - f"Ignoring {name}={value} as could not find matching config field. " - f"Available fields: {', '.join(map(str, fields))}" - ) - - def _cast_value(_field, _value): - field_type = _field.type - is_optional = typing_inspect.is_optional_type(field_type) - if is_optional: - field_type = field_type.__args__[0] - if hasattr(field_type, "__origin__") and field_type.__origin__ is list: - return list(_value) if _value is not None else None - elif isinstance(_value, str) and isinstance(field_type, type) and issubclass(field_type, list): - return _value.split(",") - return field_type(_value) - - config_with_casted_values = { - name: _cast_value(fields[name], value) for name, value in config.items() if name in fields - } - return cls(**config_with_casted_values) - - @classmethod - def from_env(cls) -> "TritonConfig": - """Creates TritonConfig from environment variables. - - Environment variables should start with `PYTRITON_TRITON_CONFIG_` prefix. For example: - - PYTRITON_TRITON_CONFIG_GRPC_PORT=45436 - PYTRITON_TRITON_CONFIG_LOG_VERBOSE=4 - - Typical use: - - triton_config = TritonConfig.from_env() - - Returns: - TritonConfig class instantiated from environment variables. - """ - prefix = "PYTRITON_TRITON_CONFIG_" - config = {} - list_pattern = re.compile(r"^(.+?)_(\d+)$") - - for name, value in os.environ.items(): - if name.startswith(prefix): - key = name[len(prefix) :].lower() - match = list_pattern.match(key) - if match: - list_key, index = match.groups() - index = int(index) - if list_key not in config: - config[list_key] = [] - if len(config[list_key]) <= index: - config[list_key].extend([None] * (index + 1 - len(config[list_key]))) - config[list_key][index] = value - else: - config[key] = value - - # Remove None values from lists (in case of non-sequential indexes) - for key in config: - if isinstance(config[key], list): - config[key] = [item for item in config[key] if item is not None] - - return cls.from_dict(config) - - -@dataclasses.dataclass -class TritonLifecyclePolicy: - """Triton Inference Server lifecycle policy. - - Indicates when Triton server is launched and where the model store is located (locally or remotely managed by - Triton server). 
- """ - - launch_triton_on_startup: bool = True - local_model_store: bool = False - - -DefaultTritonLifecyclePolicy = TritonLifecyclePolicy() -VertextAILifecyclePolicy = TritonLifecyclePolicy(launch_triton_on_startup=False, local_model_store=True) - - -class _LogLevelChecker: - """Check if log level is too verbose.""" - - def __init__(self, url: str) -> None: - """Initialize LogLevelChecker. - - Args: - url: Triton Inference Server URL in form of ://: - - Raises: - PyTritonClientInvalidUrlError: if url is invalid - """ - self._log_settings = None - self._url = url - - def check(self, skip_update: bool = False): - """Check if log level is too verbose. - - Also obtains wait for server is ready + log settings from server if not already obtained. - - Raises: - PyTritonClientTimeoutError: if timeout is reached - """ - if self._log_settings is None and not skip_update: - with contextlib.closing(create_client_from_url(self._url)) as client: - wait_for_server_ready(client, timeout_s=DEFAULT_TRITON_STARTUP_TIMEOUT_S) - self._log_settings = client.get_log_settings() - - if self._log_settings is not None: - log_settings = self._log_settings - log_verbose_level = 0 - if hasattr(log_settings, "settings"): # grpc client - for key, value in log_settings.settings.items(): - if key == "log_verbose_level": - log_verbose_level = value.uint32_param - break - else: # http client - log_settings = {key: str(value) for key, value in log_settings.items()} - log_verbose_level = int(log_settings.get("log_verbose_level", 0)) - if log_verbose_level > 0: - LOGGER.warning( - f"Triton Inference Server is running with enabled verbose logs (log_verbose_level={log_verbose_level}). " - "It may affect inference performance." - ) - - -class TritonBase: - """Base class for Triton Inference Server.""" - - def __init__( - self, - url: str, - workspace: Union[Workspace, str, pathlib.Path, None] = None, - triton_lifecycle_policy: TritonLifecyclePolicy = DefaultTritonLifecyclePolicy, - ): - """Initialize TritonBase. - - Args: - url: Triton Inference Server URL in form of ://: - workspace: Workspace for storing communication sockets and the other temporary files. - triton_lifecycle_policy: policy indicating when Triton server is launched and where the model store is located - (locally or remotely managed by Triton server). - - """ - self._triton_lifecycle_policy = triton_lifecycle_policy - self._workspace = workspace if isinstance(workspace, Workspace) else Workspace(workspace) - self._url = url - _local_model_config_path = ( - self._workspace.model_store_path if triton_lifecycle_policy.local_model_store else None - ) - self._model_manager = ModelManager(self._url, _local_model_config_path) - self._cv = th.Condition() - self._triton_context = TritonContext() - self._log_level_checker = _LogLevelChecker(self._url) - - with self._cv: - self._stopped = True - self._connected = False - - atexit.register(self.stop) - - def bind( - self, - model_name: str, - infer_func: Union[Callable, Sequence[Callable]], - inputs: Sequence[Tensor], - outputs: Sequence[Tensor], - model_version: int = 1, - config: Optional[ModelConfig] = None, - strict: bool = False, - trace_config: Optional[List[str]] = None, - ) -> None: - """Create a model with given name and inference callable binding into Triton Inference Server. 
- - More information about model configuration: - https://github.com/triton-inference-server/server/blob/main/docs/user_guide/model_configuration.md - - Args: - infer_func: Inference callable to handle request/response from Triton Inference Server - (or list of inference callable for multi instance model) - inputs: Definition of model inputs - outputs: Definition of model outputs - model_name: Name under which model is available in Triton Inference Server. It can only contain - alphanumeric characters, dots, underscores and dashes. - model_version: Version of model - config: Model configuration for Triton Inference Server deployment - strict: Enable strict validation between model config outputs and inference function result - trace_config: List of trace config parameters - """ - self._validate_model_name(model_name) - model_kwargs = {} - if trace_config is None: - triton_config = getattr(self, "_config", None) - if triton_config is not None: - trace_config = getattr(triton_config, "trace_config", None) - if trace_config is not None: - LOGGER.info(f"Using trace config from TritonConfig: {trace_config}") - model_kwargs["trace_config"] = trace_config - else: - model_kwargs["trace_config"] = trace_config - telemetry_tracer = get_telemetry_tracer() - - # Automatically set telemetry tracer if not set at the proxy side - if telemetry_tracer is None and trace_config is not None: - LOGGER.info("Setting telemetry tracer from TritonConfig") - telemetry_tracer = build_proxy_tracer_from_triton_config(trace_config) - set_telemetry_tracer(telemetry_tracer) - - model = Model( - model_name=model_name, - model_version=model_version, - inference_fn=infer_func, - inputs=inputs, - outputs=outputs, - config=config if config else ModelConfig(), - workspace=self._workspace, - triton_context=self._triton_context, - strict=strict, - **model_kwargs, - ) - model.on_model_event(self._on_model_event) - - self._model_manager.add_model(model, self.is_connected()) - - def connect(self) -> None: - """Connect to Triton Inference Server. - - Raises: - TimeoutError: if Triton Inference Server is not ready after timeout - """ - with self._cv: - if self._connected: - LOGGER.debug("Triton Inference already connected.") - return - - self._wait_for_server() - if self._triton_lifecycle_policy.local_model_store: - self._model_manager.setup_models() - else: - self._model_manager.load_models() - - self._wait_for_models() - self._connected = True - - def serve(self, monitoring_period_s: float = MONITORING_PERIOD_S) -> None: - """Run Triton Inference Server and lock thread for serving requests/response. - - Args: - monitoring_period_s: the timeout of monitoring if Triton and models are available. - Every monitoring_period_s seconds main thread wakes up and check if triton server and proxy backend - are still alive and sleep again. If triton or proxy is not alive - method returns. 
- """ - self.connect() - with self._cv: - try: - while self.is_alive(): - self._cv.wait(timeout=monitoring_period_s) - except KeyboardInterrupt: - LOGGER.info("SIGINT received, exiting.") - self.stop() - - def stop(self) -> bool: - """Stop Triton Inference Server and clean workspace.""" - with self._cv: - if self._stopped: - LOGGER.debug("Triton Inference already stopped.") - return False - self._stopped = True - self._connected = False - atexit.unregister(self.stop) - self._pre_stop_impl() - self._model_manager.clean() - self._workspace.clean() - - with self._cv: - self._cv.notify_all() - LOGGER.debug("Stopped Triton Inference server and proxy backends") - self._log_level_checker.check(skip_update=True) - - return True - - def is_alive(self) -> bool: - """Check if Triton Inference Server is alive.""" - if not self._is_alive_impl(): - return False - - for model in self._model_manager.models: - if not model.is_alive(): - return False - return True - - def is_connected(self) -> bool: - """Check if Triton Inference Server is connected.""" - with self._cv: - return self._connected - - def __enter__(self): - """Connects to Triton server on __enter__. - - Returns: - A Triton object - """ - if self._triton_lifecycle_policy.launch_triton_on_startup: - self.connect() - return self - - def __exit__(self, *_) -> None: - """Exit the context stopping the process and cleaning the workspace. - - Args: - *_: unused arguments - """ - self.stop() - - def _is_alive_impl(self) -> bool: - return True - - def _pre_stop_impl(self): - pass - - def _post_stop_impl(self): - pass - - def _wait_for_server(self) -> None: - """Wait for Triton Inference Server to be ready.""" - self._log_level_checker.check() - try: - with contextlib.closing(create_client_from_url(self._url)) as client: - wait_for_server_ready(client, timeout_s=DEFAULT_TRITON_STARTUP_TIMEOUT_S) - except TimeoutError as e: - LOGGER.warning( - f"Could not verify locally if Triton Inference Server is ready using {self._url}. " - "Please, check the server logs for details." - ) - raise TimeoutError("Triton Inference Server is not ready after timeout.") from e - - def _wait_for_models(self) -> None: - """Log loaded models in console to show the available endpoints.""" - self._log_level_checker.check() - - try: - for model in self._model_manager.models: - with ModelClient( - url=self._url, model_name=model.model_name, model_version=str(model.model_version) - ) as client: - # This waits for only tritonserver and lightweight proxy backend to be ready - # timeout should be short as model is loaded before execution of Triton.start() method - client.wait_for_model(timeout_s=WAIT_FORM_MODEL_TIMEOUT_S) - except TimeoutError: - LOGGER.warning( - f"Could not verify locally if models are ready using {self._url}. " - "Please, check the server logs for details." 
- ) - - for model in self._model_manager.models: - LOGGER.info(f"Infer function available as model: `{MODEL_URL.format(model_name=model.model_name)}`") - LOGGER.info(f" Status: `GET {MODEL_READY_URL.format(model_name=model.model_name)}`") - LOGGER.info(f" Model config: `GET {MODEL_CONFIG_URL.format(model_name=model.model_name)}`") - LOGGER.info(f" Inference: `POST {MODEL_INFER_URL.format(model_name=model.model_name)}`") - - LOGGER.info( - """Read more about configuring and serving models in """ - """documentation: https://triton-inference-server.github.io/pytriton.""" - ) - LOGGER.info(f"(Press CTRL+C or use the command `kill -SIGINT {os.getpid()}` to send a SIGINT signal and quit)") - - def _on_model_event(self, model: Model, event: ModelEvent, context: typing.Optional[typing.Any] = None): - LOGGER.info(f"Received {event} from {model}; context={context}") - - if event in [ModelEvent.RUNTIME_TERMINATING, ModelEvent.RUNTIME_TERMINATED]: - threading.Thread(target=self.stop).start() - - @classmethod - def _validate_model_name(cls, model_name: str) -> None: - """Validate model name. - - Args: - model_name: Model name - """ - if not model_name: - raise PyTritonValidationError("Model name cannot be empty") - - if not re.match(r"^[a-zA-Z0-9._-]+$", model_name): - raise PyTritonValidationError( - "Model name can only contain alphanumeric characters, dots, underscores and dashes" - ) - - -class Triton(TritonBase): - """Triton Inference Server for Python models.""" - - def __init__( - self, - *, - config: Optional[TritonConfig] = None, - workspace: Union[Workspace, str, pathlib.Path, None] = None, - triton_lifecycle_policy: Optional[TritonLifecyclePolicy] = None, - ): - """Initialize Triton Inference Server context for starting server and loading models. - - Args: - config: TritonConfig object with optional customizations for Triton Inference Server. - Configuration can be passed also through environment variables. - See [TritonConfig.from_env()][pytriton.triton.TritonConfig.from_env] class method for details. - - Order of precedence: - - - config defined through `config` parameter of init method. - - config defined in environment variables - - default TritonConfig values - workspace: workspace or path where the Triton Model Store and files used by pytriton will be created. - If workspace is `None` random workspace will be created. - Workspace will be deleted in [Triton.stop()][pytriton.triton.Triton.stop]. - triton_lifecycle_policy: policy indicating when Triton server is launched and where the model store is located - (locally or remotely managed by Triton server). If triton_lifecycle_policy is None, - DefaultTritonLifecyclePolicy is used by default (Triton server is launched on startup and model store is not local). - Only if triton_lifecycle_policy is None and config.allow_vertex_ai is True, VertextAILifecyclePolicy is used instead. 
- """ - _triton_lifecycle_policy = ( - VertextAILifecyclePolicy - if triton_lifecycle_policy is None and config is not None and config.allow_vertex_ai - else triton_lifecycle_policy - ) or DefaultTritonLifecyclePolicy - - def _without_none_values(_d): - return {name: value for name, value in _d.items() if value is not None} - - default_config_dict = _without_none_values(TritonConfig().to_dict()) - env_config_dict = _without_none_values(TritonConfig.from_env().to_dict()) - explicit_config_dict = _without_none_values(config.to_dict() if config else {}) - config_dict = {**default_config_dict, **env_config_dict, **explicit_config_dict} - self._config = TritonConfig(**config_dict) - workspace_instance = workspace if isinstance(workspace, Workspace) else Workspace(workspace) - self._prepare_triton_config(workspace_instance) - endpoint_protocol = "http" if self._config.allow_http in [True, None] else "grpc" - super().__init__( - url=endpoint_utils.get_endpoint(self._triton_server_config, endpoint_protocol), - workspace=workspace_instance, - triton_lifecycle_policy=_triton_lifecycle_policy, - ) - self._triton_server = None - - def __enter__(self) -> "Triton": - """Entering the context launches the triton server. - - Returns: - A Triton object - """ - if self._triton_lifecycle_policy.launch_triton_on_startup: - self._run_server() - super().__enter__() - return self - - def run(self) -> None: - """Run Triton Inference Server.""" - self._run_server() - self.connect() - - def serve(self, monitoring_period_s: float = MONITORING_PERIOD_S) -> None: - """Run Triton Inference Server and lock thread for serving requests/response. - - Args: - monitoring_period_s: the timeout of monitoring if Triton and models are available. - Every monitoring_period_s seconds main thread wakes up and check if triton server and proxy backend - are still alive and sleep again. If triton or proxy is not alive - method returns. 
- """ - self._run_server() - super().serve(monitoring_period_s=monitoring_period_s) - - def _initialize_server(self) -> None: - """Initialize Triton Inference Server before binary execution.""" - self._triton_inference_server_path = self._prepare_triton_inference_server() - self._triton_server = TritonServer( - path=(self._triton_inference_server_path / "bin" / "tritonserver").as_posix(), - libs_path=get_libs_path(), - config=self._triton_server_config, - ) - - url = ( - self._triton_server.get_endpoint("http") - if (self._config.allow_http is None or self._config.allow_http) - else self._triton_server.get_endpoint("grpc") - ) - self._log_level_checker = _LogLevelChecker(url) - - def _prepare_triton_config(self, workspace: Workspace) -> None: - self._triton_server_config = TritonServerConfig() - config_data = self._config.to_dict() - self._python_backend_config = PythonBackendConfig() - python_backend_config_data = { - "shm-region-prefix-name": self._shm_prefix(), - "shm-default-byte-size": INITIAL_BACKEND_SHM_SIZE, - "shm-growth-byte-size": GROWTH_BACKEND_SHM_SIZE, - } - for name, value in python_backend_config_data.items(): - if name not in PythonBackendConfig.allowed_keys() or value is None: - continue - - if isinstance(value, pathlib.Path): - value = value.as_posix() - self._python_backend_config[name] = value - for name, value in config_data.items(): - if name not in TritonServerConfig.allowed_keys() or value is None: - continue - - if isinstance(value, pathlib.Path): - value = value.as_posix() - self._triton_server_config[name] = value - - self._triton_server_config["model_control_mode"] = "explicit" - self._triton_server_config["load-model"] = "*" - self._triton_server_config["backend_config"] = self._python_backend_config.to_list_args() - if "model_repository" not in self._triton_server_config: - self._triton_server_config["model_repository"] = workspace.model_store_path.as_posix() - - def _prepare_triton_inference_server(self) -> pathlib.Path: - """Prepare binaries and libraries of Triton Inference Server for execution. - - Return: - Path where Triton binaries are ready for execution - """ - triton_inference_server_path = self._workspace.path / "tritonserver" - - LOGGER.debug("Preparing Triton Inference Server binaries and libs for execution.") - shutil.copytree( - TRITONSERVER_DIST_DIR, - triton_inference_server_path, - ignore=shutil.ignore_patterns("python_backend_stubs", "triton_python_backend_stub"), - ) - LOGGER.debug(f"Triton Inference Server binaries copied to {triton_inference_server_path} without stubs.") - - major = sys.version_info[0] - minor = sys.version_info[1] - version = f"{major}.{minor}" - - src_stub_path = get_stub_path(version) - dst_stub_path = triton_inference_server_path / "backends" / "python" / "triton_python_backend_stub" - - LOGGER.debug(f"Copying stub for version {version} from {src_stub_path} to {dst_stub_path}") - shutil.copy(src_stub_path, dst_stub_path) - - LOGGER.debug(f"Triton Inference Server binaries ready in {triton_inference_server_path}") - - self._triton_server_config["backend_directory"] = (triton_inference_server_path / "backends").as_posix() - if "cache_directory" not in self._triton_server_config: - self._triton_server_config["cache_directory"] = (triton_inference_server_path / "caches").as_posix() - return triton_inference_server_path - - def _shm_prefix(self) -> str: - """Generate unique prefix for shm memory. 
- - Returns: - String with prefix - """ - hash = codecs.encode(os.urandom(4), "hex").decode() - pid = os.getpid() - return f"pytrtion{pid}-{hash}" - - def _run_server(self): - """Run Triton Inference Server.""" - if self._triton_server is None: - self._initialize_server() - if not self._triton_server.is_alive(): - with self._cv: - self._stopped = False - LOGGER.debug("Starting Triton Inference") - self._triton_server.register_on_exit(self._on_tritonserver_exit) - self._triton_server.start() - - def _is_alive_impl(self) -> bool: - """Verify is deployed models and server are alive. - - Returns: - True if server and loaded models are alive, False otherwise. - """ - if not self._triton_server: - return False - - return self._triton_server.is_alive() - - def _pre_stop_impl(self): - self._triton_server.unregister_on_exit(self._on_tritonserver_exit) - if self._triton_server is not None: - self._triton_server.stop() - - def _on_tritonserver_exit(self, *_) -> None: - """Handle the Triton Inference Server process exit. - - Args: - _: unused arguments - """ - LOGGER.debug("Got callback that tritonserver process finished") - self.stop() - - -class RemoteTriton(TritonBase): - """RemoteTriton connects to Triton Inference Server running on remote host.""" - - def __init__(self, url: str, workspace: Union[Workspace, str, pathlib.Path, None] = None): - """Initialize RemoteTriton. - - Args: - url: Triton Inference Server URL in form of ://: - If scheme is not provided, http is used as default. - If port is not provided, 8000 is used as default for http and 8001 for grpc. - workspace: path to be created where the files used by pytriton will be stored - (e.g. socket files for communication). - If workspace is `None` temporary workspace will be created. - Workspace should be created in shared filesystem space between RemoteTriton - and Triton Inference Server to allow access to socket files - (if you use containers, folder must be shared between containers). - - """ - super().__init__( - url=TritonUrl.from_url(url).with_scheme, - workspace=workspace, - triton_lifecycle_policy=TritonLifecyclePolicy(launch_triton_on_startup=True, local_model_store=False), - ) - - with self._cv: - self._stopped = False - - def __enter__(self) -> "RemoteTriton": - """Entering the context connects to remote Triton server. - - Returns: - A RemoteTriton object - """ - super().__enter__() - return self diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/__init__.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/__init__.py deleted file mode 100644 index 8010bd32129eb99ce3ce66981b81d3ba41bf287b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/dataclasses.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/dataclasses.py deleted file mode 100644 index 2bb86bb71a7ffadf9745452d18f9604a68fbc8fe..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/dataclasses.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Collection of utils for dataclasses.""" - -import wrapt - - -@wrapt.decorator -def kwonly_dataclass(wrapped, instance, args, kwargs): - """Poor dataclass wrapper to have init method keyword-only. - - Dataclass keyword-only arguments are available since Python 3.10. - - Example usage: - - @kwonly_dataclass - @dataclass.dataclasses - class MyDataClass: - a: int - b: str - - my_dataclass = MyDataClass(a=1, b="hello") - MyDataClass(1, "hello") # raises TypeError - """ - if args: - raise TypeError(f"{wrapped.__name__} initialization can't be used with positional arguments") - return wrapped(**kwargs) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/distribution.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/distribution.py deleted file mode 100644 index 1c10f78187269366d9d1153da3f6052abbe3aa29..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/distribution.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Set of utils to obtain properties of pytriton distribution.""" - -import logging -import pathlib -import site - -LOGGER = logging.getLogger(__name__) - - -def get_root_module_path() -> pathlib.Path: - """Obtain path to pytriton module. - - Returns: - Path to pytriton root module in site or if installed in editable model - local. - """ - pytriton_module_path = pathlib.Path(__file__).parent.parent - LOGGER.debug("Obtained pytriton module path: %s", pytriton_module_path) - return pytriton_module_path - - -def is_editable_install() -> bool: - """Checks if pytriton is installed in editable mode. - - Returns: - True if pytriton is installed in editable mode, False otherwise. 
- """ - editable_mode = True - site_packages = site.getsitepackages() + [site.getusersitepackages()] - pytriton_module_path = get_root_module_path() - for site_package in site_packages: - try: - pytriton_module_path.relative_to(site_package) - editable_mode = False - break - except ValueError: - pass - LOGGER.debug("pytriton is installed in editable mode: %s", editable_mode) - return editable_mode - - -def get_libs_path(): - """Obtains path to directory with external libraries required by library. - - Returns: - Path to directory with external libraries required by library. - """ - pytriton_module_path = get_root_module_path() - if is_editable_install(): - libs_path = pytriton_module_path / "tritonserver/external_libs" - else: - libs_path = pytriton_module_path.parent / "nvidia_pytriton.libs" - LOGGER.debug("Obtained nvidia_pytriton.libs path: %s", libs_path) - return libs_path - - -def get_stub_path(version: str): - """Obtains path stub file for provided Python interpreter version. - - Args: - version: Python interpreter version - - Returns: - Path to stub file for given Python version - """ - pytriton_module_path = get_root_module_path() - stub_path = pytriton_module_path / "tritonserver" / "python_backend_stubs" / version / "triton_python_backend_stub" - LOGGER.debug("Obtained pytriton stubs path for %s: %s", version, stub_path) - return stub_path diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/endpoint_utils.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/endpoint_utils.py deleted file mode 100644 index f58884f703f469e53251daa5298900db22797f44..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/endpoint_utils.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Endpoint url forming utilities module.""" - -import re -from typing import Literal - -from pytriton.constants import DEFAULT_GRPC_PORT, DEFAULT_HTTP_PORT, DEFAULT_METRICS_PORT, TRITON_LOCAL_IP -from pytriton.server.triton_server_config import TritonServerConfig - - -def get_endpoint(server_config: TritonServerConfig, endpoint: Literal["http", "grpc", "metrics"]) -> str: - """Get endpoint url. 
- - Args: - server_config: TritonServerConfig object - endpoint: endpoint name - - Returns: - endpoint url in form of {protocol}://{host}:{port} - """ - protocols = {"http": "http", "grpc": "grpc", "metrics": "http"} - - def _obtain_address(key_names): - for key_name in key_names: - address = server_config[key_name] - if address and not re.match(r"^0+.0+.0+.0+$", address): - break - else: - address = TRITON_LOCAL_IP - - return address - - addresses = { - "http": _obtain_address(["http-address"]), - "grpc": _obtain_address(["grpc-address"]), - "metrics": _obtain_address(["metrics-address", "http-address"]), - } - ports = { - "http": server_config["http-port"] or DEFAULT_HTTP_PORT, - "grpc": server_config["grpc-port"] or DEFAULT_GRPC_PORT, - "metrics": server_config["metrics-port"] or DEFAULT_METRICS_PORT, - } - - return f"{protocols[endpoint]}://{addresses[endpoint]}:{ports[endpoint]}" diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/logging.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/logging.py deleted file mode 100644 index 26f57b67ec031074c045f81ef72c1c5a3f4aacae..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/logging.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module with logging related utils.""" - -import logging - - -def silence_3rd_party_loggers(): - """Silence 3rd party libraries which adds enormous number of log lines on DEBUG level.""" - logging.getLogger("sh.command").setLevel(logging.WARNING) - logging.getLogger("sh.stream_bufferer").setLevel(logging.WARNING) - logging.getLogger("sh.streamreader").setLevel(logging.WARNING) diff --git a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/workspace.py b/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/workspace.py deleted file mode 100644 index 2725fa8487c8d85a3fa9f1b2f75fee221d7d6cdc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/build/lib/pytriton/utils/workspace.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Workspace class for handling space to store artifacts.""" - -import logging -import pathlib -import shutil -import tempfile -import warnings -from typing import Optional, Union - - -class CleanupWarning(UserWarning): - """Warning for cleanup issues.""" - - pass - - -LOGGER = logging.getLogger(__name__) - - -class Workspace: - """Class for storing the workspace information.""" - - def __init__(self, workspace_path: Optional[Union[str, pathlib.Path]] = None): - """Initialize workspace in the provided path or create workspace in default location. - - Args: - workspace_path: Path to a directory where workspace has to be created (optional). - If not provided workspace with random name will be created in ~/.cache/pytriton directory. - - Raises: - FileExistsError: in case workspace already exists. - """ - if workspace_path is None: - from pytriton.constants import PYTRITON_HOME - - PYTRITON_HOME.mkdir(parents=True, exist_ok=True) - self._tmp_dir = tempfile.TemporaryDirectory(dir=PYTRITON_HOME, prefix="workspace_") - self._workspace_path = pathlib.Path(self._tmp_dir.name).resolve() - LOGGER.debug(f"Workspace path {self._workspace_path}") - else: - self._tmp_dir = None - self._workspace_path = pathlib.Path(workspace_path).resolve() - LOGGER.debug(f"Workspace path {self._workspace_path}") - self._workspace_path.mkdir(parents=True) - - self.model_store_path = self._workspace_path / "model-store" - self.model_store_path.mkdir(parents=True) - - @property - def path(self) -> pathlib.Path: - """Return path to the workspace. - - Returns: - Path object with location of workspace catalog - """ - return self._workspace_path - - def exists(self) -> bool: - """Verify if workspace catalog exists. - - Returns: - True if workspace catalog exists. False otherwise. - """ - return self._workspace_path.exists() - - def is_empty(self) -> bool: - """Verify if workspace contains any files or folders. - - Returns: - True if workspace is not empty. False otherwise. - """ - all_files = list(self.path.rglob("*")) - if len(all_files) == 0: - return True - for p in all_files: - rel_p = p.relative_to(self.path) - if rel_p.parts and not rel_p.parts[0].startswith("."): - LOGGER.warning(f"Non empty path {p}") - return False - return True - - def clean(self) -> None: - """Clean workspace removing files and directories created inside including the workspace itself.""" - LOGGER.debug(f"Cleaning workspace dir {self.path}") - - try: - for child in self.path.rglob("*"): - rel_p = child.relative_to(self.path) - if len(rel_p.parts) == 0 or rel_p.parts[0].startswith("."): - continue - if child.is_dir(): - LOGGER.debug(f"Cleaning workspace dir {child}") - shutil.rmtree(child, ignore_errors=True) - else: - LOGGER.debug(f"Cleaning workspace file {child}") - child.unlink() - if not self.is_empty(): - raise OSError(f"Could not clean {self.path} workspace") - if self.path.exists(): - LOGGER.debug(f"Removing workspace dir {self.path}") - self.path.rmdir() - except OSError as e: - warnings.warn( - f"Could not clean workspace {self.path}. {e}", - CleanupWarning, - stacklevel=1, - ) diff --git a/stf/stf-api-alternative/pytriton/docs/README.md b/stf/stf-api-alternative/pytriton/docs/README.md deleted file mode 100644 index 6f5fcf2240a46f8c8157930668af76af49215883..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/README.md +++ /dev/null @@ -1,194 +0,0 @@ - - -# PyTriton - -PyTriton is a Flask/FastAPI-like interface that simplifies Triton's deployment in Python environments. 
-The library allows serving Machine Learning models directly from Python through -NVIDIA's [Triton Inference Server](https://github.com/triton-inference-server). - -## How it works? - -In PyTriton, as in Flask or FastAPI, you can define any Python function that executes a machine learning model prediction and exposes -it through an HTTP/gRPC API. PyTriton installs Triton Inference Server in your environment and uses it for handling -HTTP/gRPC requests and responses. Our library provides a Python API that allows attaching a Python function to Triton -and a communication layer to send/receive data between Triton and the function. This solution helps utilize the -performance features of Triton Inference Server, such as dynamic batching or response cache, without changing your model -environment. Thus, it improves the performance of running inference on GPU for models implemented in Python. The solution is -framework-agnostic and can be used along with frameworks like PyTorch, TensorFlow, or JAX. - -## Architecture - -The diagram below presents the schema of how the Python models are served through Triton Inference Server using -PyTriton. The solution consists of two main components: - -- Triton Inference Server: for exposing the HTTP/gRPC API and benefiting from performance features like dynamic batching -or response cache. -- Python Model Environment: your environment where the Python model is executed. - - -The Triton Inference Server binaries are provided as part of the PyTriton installation. The Triton Server is -installed in your current environment (system or container). The PyTriton controls the Triton Server process -through the `Triton Controller`. - -Exposing the model through PyTriton requires the definition of an `Inference Callable` - a Python function that is -connected to Triton Inference Server and executes the model or ensemble for predictions. The integration layer binds -the `Inference Callable` to Triton Server and exposes it through the Triton HTTP/gRPC API under a provided ``. Once -the integration is done, the defined `Inference Callable` receives data sent to the HTTP/gRPC API endpoint -`v2/models//infer`. Read more about HTTP/gRPC interface in Triton Inference Server -[documentation](https://github.com/triton-inference-server/server/blob/main/docs/customization_guide/inference_protocols.md#httprest-and-grpc-protocols). - -The HTTP/gRPC requests sent to `v2/models//infer` are handled by Triton -Inference Server. The server batches requests and passes them to the `Proxy Backend`, which sends the batched requests to the appropriate -`Inference Callable`. The data is sent as a `numpy` array. Once the `Inference Callable` finishes execution of -the model prediction, the result is returned to the `Proxy Backend`, and a response is created by Triton Server. - -![High Level Design](assets/hld.svg) - -## Serving the models - -PyTriton provides an option to serve your Python model using Triton Inference Server to -handle HTTP/gRPC -requests and pass the input/output tensors to and from the model. We use a blocking mode where the application is a -long-lived process deployed in your cluster to serve the requests from clients. - -Before you run the model for serving the inference callback function, it has to be defined. 
The inference callback receives the -inputs and should return the model outputs: - -```python -import numpy as np -from pytriton.decorators import batch - - -@batch -def infer_fn(**inputs: np.ndarray): - input1, input2 = inputs.values() - outputs = model(input1, input2) - return [outputs] -``` - -The `infer_fn` receives the batched input data for the model and should return the batched outputs. - -In the next step, you need to create a connection between Triton and the model. For that purpose, the `Triton` class has to -be used, and the `bind` method is required to be called to create a dedicated connection between Triton Inference -Server and the defined `infer_fn`. - -In the blocking mode, we suggest using the `Triton` object as a context manager where multiple models can be loaded in -the way presented below: - - -```python -from pytriton.triton import Triton -from pytriton.model_config import ModelConfig, Tensor - -with Triton() as triton: - triton.bind( - model_name="MyModel", - infer_func=infer_fn, - inputs=[ - Tensor(dtype=bytes, shape=(1,)), # sample containing single bytes value - Tensor(dtype=bytes, shape=(-1,)), # sample containing vector of bytes - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=16), - ) -``` - -At this point, you have defined how the model has to be handled by Triton and where the HTTP/gRPC requests for the model have -to be directed. The last part for serving the model is to call the `serve` method on the Triton object: - - - -```python -with Triton() as triton: - # ... - triton.serve() -``` - -When the `.serve()` method is called on the `Triton` object, the inference queries can be sent to -`localhost:8000/v2/models/MyModel`, and the `infer_fn` is called to handle the inference query. - -## Working in the Jupyter Notebook - -The package provides an option to work with your model inside the Jupyter Notebook. We call it a -background mode where -the model is deployed on Triton Inference Server for handling HTTP/gRPC requests, but there are other actions that you -want to perform after loading and starting serving the model. - -Having the `infer_fn` defined in the same way as described in the [serving the models](#serving-the-models) section, you -can use the `Triton` object without a context: - - -```python -from pytriton.triton import Triton -triton = Triton() -``` - -In the next step, the model has to be loaded for serving in Triton Inference Server (which is also the same -as in the serving example): - - - -```python -import numpy as np -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor - -@batch -def infer_fn(**inputs: np.ndarray): - input1, input2 = inputs.values() - outputs = input1 + input2 - return [outputs] - -triton.bind( - model_name="MyModel", - infer_func=infer_fn, - inputs=[ - Tensor(shape=(1,), dtype=np.float32), - Tensor(shape=(-1,), dtype=np.float32), - ], - outputs=[Tensor(shape=(-1,), dtype=np.float32)], - config=ModelConfig(max_batch_size=16), -) -``` - -Finally, to run the model in background mode, use the `run` method: - - -```python -triton.run() -``` - -When the `.run()` method is called on the `Triton` object, the inference queries can be sent to -`localhost:8000/v2/models/MyModel`, and the `infer_fn` is called to handle the inference query. - -The Triton server can be stopped at any time using the `stop` method: - - - -```python -triton.stop() -``` - -## What next? 
- -Read more about using PyTriton in the [Quick Start](quick_start.md), [Examples](examples.md) and -find more options on how to configure Triton, models, and deployment on a cluster in the [Deploying Models](initialization.md) -section. - -The details about classes and methods can be found in the [API Reference](api.md) page. diff --git a/stf/stf-api-alternative/pytriton/docs/api.md b/stf/stf-api-alternative/pytriton/docs/api.md deleted file mode 100644 index 47c63a5f90e0c6acc7098e5cca4bc703d2279d4c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/api.md +++ /dev/null @@ -1,34 +0,0 @@ - - -# API Reference - -::: pytriton.triton.TritonConfig - -::: pytriton.decorators - -::: pytriton.triton.Triton - -::: pytriton.triton.RemoteTriton - -::: pytriton.model_config.tensor.Tensor - -::: pytriton.model_config.common - -::: pytriton.model_config.model_config.ModelConfig - -::: pytriton.client.client - diff --git a/stf/stf-api-alternative/pytriton/docs/assets/favicon.png b/stf/stf-api-alternative/pytriton/docs/assets/favicon.png deleted file mode 100644 index e4e2e9842d66937b4460269bdf37ac35248ad8c4..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/pytriton/docs/assets/favicon.png and /dev/null differ diff --git a/stf/stf-api-alternative/pytriton/docs/assets/hld.svg b/stf/stf-api-alternative/pytriton/docs/assets/hld.svg deleted file mode 100644 index 523a24e00b369efacc87872eca626a8a2fc8cdbc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/assets/hld.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/docs/assets/logo.png b/stf/stf-api-alternative/pytriton/docs/assets/logo.png deleted file mode 100644 index 6ea93bd455483e061bead8684eeac1f64ebbdd62..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/pytriton/docs/assets/logo.png and /dev/null differ diff --git a/stf/stf-api-alternative/pytriton/docs/assets/styles.css b/stf/stf-api-alternative/pytriton/docs/assets/styles.css deleted file mode 100644 index e208a6dffb29522db51d25a6478930d2ff77eb25..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/assets/styles.css +++ /dev/null @@ -1,18 +0,0 @@ -/* -Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -.md-header__button.md-logo :is(img,svg) { - height: 1.8rem; !important; -} \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/docs/binding_configuration.md b/stf/stf-api-alternative/pytriton/docs/binding_configuration.md deleted file mode 100644 index 7e2bf5bd73aa6735ae4af06c7e45bef34aee26f3..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/binding_configuration.md +++ /dev/null @@ -1,120 +0,0 @@ - -# Binding Configuration - -The additional configuration of binding the model for running a model through the Triton Inference Server can be -provided in the `config` argument in the `bind` method. This section describes the possible configuration enhancements. -The configuration of the model can be adjusted by overriding the defaults for the `ModelConfig` object. - -```python -from pytriton.model_config.common import DynamicBatcher - -class ModelConfig: - batching: bool = True - max_batch_size: int = 4 - batcher: DynamicBatcher = DynamicBatcher() - response_cache: bool = False -``` - -## Batching - -The batching feature collects one or more samples and passes them to the model together. The model processes -multiple samples at the same time and returns the output for all the samples processed together. - -Batching can significantly improve throughput. Processing multiple samples at the same time leverages the benefits of -utilizing GPU performance for inference. - -The Triton Inference Server is responsible for collecting multiple incoming requests into a single batch. The batch is -passed to the model, which improves the inference performance (throughput and latency). This feature is called -`dynamic batching`, which collects samples from multiple clients into a single batch processed by the model. - -On the PyTriton side, the `infer_fn` obtain the fully created batch by Triton Inference Server so the only -responsibility is to perform computation and return the output. - -By default, batching is enabled for the model. The default behavior for Triton is to have dynamic batching enabled. -If your model does not support batching, use `batching=False` to disable it in Triton. - -## Maximal batch size - -The maximal batch size defines the number of samples that can be processed at the same time by the model. This configuration -has an impact not only on throughput but also on memory usage, as a bigger batch means more data loaded to the memory -at the same time. - -The `max_batch_size` has to be a value greater than or equal to 1. - -## Dynamic batching - -The dynamic batching is a Triton Inference Server feature and can be configured by defining the `DynamicBatcher` -object: - -```python -from typing import Dict, Optional -from pytriton.model_config.common import QueuePolicy - -class DynamicBatcher: - max_queue_delay_microseconds: int = 0 - preferred_batch_size: Optional[list] = None - preserve_ordering: bool = False - priority_levels: int = 0 - default_priority_level: int = 0 - default_queue_policy: Optional[QueuePolicy] = None - priority_queue_policy: Optional[Dict[int, QueuePolicy]] = None -``` - -More about dynamic batching can be found in -the [Triton Inference Server documentation](https://github.com/triton-inference-server/server/blob/main/docs/user_guide/model_configuration.md#dynamic-batcher) -and [API spec](api.md) - -## Response cache - -The Triton Inference Server provides functionality to use a cached response for the model. 
To use the response cache: - -- provide the `cache_config` in `TritonConfig` -- set `response_cache=True` in `ModelConfig` - -More about response cache can be found in the [Triton Response Cache](https://github.com/triton-inference-server/server/blob/main/docs/user_guide/response_cache.md) page. - -Example: - - -```python -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -triton_config = TritonConfig( - cache_config=[f"local,size={1024 * 1024}"], # 1MB -) - -@batch -def _add_sub(**inputs): - a_batch, b_batch = inputs.values() - add_batch = a_batch + b_batch - sub_batch = a_batch - b_batch - return {"add": add_batch, "sub": sub_batch} - -with Triton(config=triton_config) as triton: - triton.bind( - model_name="AddSub", - infer_func=_add_sub, - inputs=[Tensor(shape=(1,), dtype=np.float32), Tensor(shape=(1,), dtype=np.float32)], - outputs=[Tensor(shape=(1,), dtype=np.float32), Tensor(shape=(1,), dtype=np.float32)], - config=ModelConfig(max_batch_size=8, response_cache=True) - ) - ... -``` diff --git a/stf/stf-api-alternative/pytriton/docs/binding_models.md b/stf/stf-api-alternative/pytriton/docs/binding_models.md deleted file mode 100644 index d400cf89ba6c2d0c00983209ff12a85e50498480..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/binding_models.md +++ /dev/null @@ -1,273 +0,0 @@ - -# Binding Models to Triton - -The Triton class provides methods to bind one or multiple models to the Triton server in order to expose HTTP/gRPC -endpoints for inference serving: - - -```python -import numpy as np -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - - -@batch -def infer_fn(**inputs: np.ndarray): - input1, input2 = inputs.values() - outputs = model(input1, input2) - return [outputs] - -with Triton() as triton: - triton.bind( - model_name="ModelName", - infer_func=infer_fn, - inputs=[ - Tensor(shape=(1,), dtype=np.bytes_), # sample containing single bytes value - Tensor(shape=(-1,), dtype=np.bytes_) # sample containing vector of bytes - ], - outputs=[ - Tensor(shape=(-1,), dtype=np.float32), - ], - config=ModelConfig(max_batch_size=8), - strict=True, - ) -``` - -The `bind` method's mandatory arguments are: - -- `model_name`: defines under which name the model is available in Triton Inference Server -- `infer_func`: function or Python `Callable` object which obtains the data passed in the request and returns the output -- `inputs`: defines the number, types, and shapes for model inputs -- `outputs`: defines the number, types, and shapes for model outputs -- `config`: more customization for model deployment and behavior on the Triton server -- `strict`: enable inference callable output validation of data types and shapes against provided model config (default: False) - -Once the `bind` method is called, the model is created in the Triton Inference Server model store under -the provided `model_name`. - -## Inference Callable - -The inference callable is an entry point for inference. This can be any callable that receives the data for -model inputs in the form of a list of request dictionaries where input names are mapped into ndarrays. -Input can be also adapted to different more convenient forms using a set of decorators. 
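For illustration only, a minimal sketch of an undecorated inference callable working directly on that list of request dictionaries might look as follows (the input and output names `INPUT_1` and `OUTPUT_1` are placeholders assumed for this example, not fixed by the API):

```python
def infer_fn(requests):
    # Each request maps input names to numpy arrays; return one dictionary
    # of output arrays per request, keyed by the output names declared in `bind`.
    responses = []
    for request in requests:
        input1 = request["INPUT_1"]
        responses.append({"OUTPUT_1": input1 * 2})
    return responses
```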
-**More details about designing inference callable and using of decorators can be found -in [Inference Callable](inference_callable.md) page.** - -In the simplest implementation for functionality that passes input data on output, a lambda can be used: - -```python -import numpy as np -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -with Triton() as triton: - triton.bind( - model_name="Identity", - infer_func=lambda requests: requests, - inputs=[Tensor(dtype=np.float32, shape=(1,))], - outputs=[Tensor(dtype=np.float32, shape=(1,))], - config=ModelConfig(max_batch_size=8) - ) -``` - -## Multi-instance model inference - -Multi-instance model inference is a mechanism for loading multiple instances of the same model and calling -them alternately (to hide transfer overhead). - -With the `Triton` class, it can be realized by providing the list of multiple inference callables to `Triton.bind` -in the `infer_func` parameter. - -The example presents multiple instances of the Linear PyTorch model loaded on separate devices. - -First, define the wrapper class for the inference handler. The class initialization receives a model and device as -arguments. The inference handling is done by method `__call__` where the `model` instance is called: - - -```python -import torch -from pytriton.decorators import batch - - -class _InferFuncWrapper: - def __init__(self, model: torch.nn.Module, device: str): - self._model = model - self._device = device - - @batch - def __call__(self, **inputs): - (input1_batch,) = inputs.values() - input1_batch_tensor = torch.from_numpy(input1_batch).to(self._device) - output1_batch_tensor = self._model(input1_batch_tensor) - output1_batch = output1_batch_tensor.cpu().detach().numpy() - return [output1_batch] -``` - -Next, create a factory function where a model and instances of `_InferFuncWrapper` are created - one per each device: - - - -```python -def _infer_function_factory(devices): - infer_fns = [] - for device in devices: - model = torch.nn.Linear(20, 30).to(device).eval() - infer_fns.append(_InferFuncWrapper(model=model, device=device)) - - return infer_fns -``` - -Finally, the list of callable objects is passed to `infer_func` parameter of the `Triton.bind` function: - - - -```python -import numpy as np -from pytriton.triton import Triton -from pytriton.model_config import ModelConfig, Tensor - -with Triton() as triton: - triton.bind( - model_name="Linear", - infer_func=_infer_function_factory(devices=["cuda", "cpu"]), - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=16), - ) - ... -``` - -Once the multiple callable objects are passed to `infer_func`, the Triton server gets information that multiple instances -of the same model have been created. The incoming requests are distributed among created instances. In our case executing -two instances of a `Linear` model loaded on CPU and GPU devices. - -## Defining Inputs and Outputs - -The integration of the Python model requires the inputs and outputs types of the model. This is required to -correctly map the input and output data passed through the Triton Inference Server. 
- -The simplest definition of model inputs and outputs expects providing the type of data and the shape per input: - - -```python -import numpy as np -from pytriton.model_config import Tensor - -inputs = [ - Tensor(dtype=np.float32, shape=(-1,)), -] -output = [ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.int32, shape=(-1,)), -] -``` - - -The provided configuration creates the following tensors: - -- Single input: - - name: INPUT_1, data type: FLOAT32, shape=(-1,) -- Two outputs: - - name: OUTPUT_1, data type: FLOAT32, shape=(-1,) - - name: OUTPUT_2, data type: INT32, shape=(-1,) - -The `-1` means a dynamic shape of the input or output. - -To define the name of the input and its exact shape, the following definition can be used: - -```python -import numpy as np -from pytriton.model_config import Tensor - -inputs = [ - Tensor(name="image", dtype=np.float32, shape=(224, 224, 3)), -] -outputs = [ - Tensor(name="class", dtype=np.int32, shape=(1000,)), -] -``` - -This definition describes that the model has: - -- a single input named `image` of size 224x224x3 and 32-bit floating-point data type -- a single output named `class` of size 1000 and 32-bit integer data type. - -The `dtype` parameter can be either `numpy.dtype`, `numpy.dtype.type`, or `str`. For example: - -```python -import numpy as np -from pytriton.model_config import Tensor - -tensor1 = Tensor(name="tensor1", shape=(-1,), dtype=np.float32), -tensor2 = Tensor(name="tensor2", shape=(-1,), dtype=np.float32().dtype), -tensor3 = Tensor(name="tensor3", shape=(-1,), dtype="float32"), -``` - -!!! warning "dtype for bytes and string inputs/outputs" - - When using the `bytes` dtype, NumPy removes trailing `\x00` bytes. - Therefore, for arbitrary bytes, it is required to use `object` dtype. - - > np.array([b"\xff\x00"]) - array([b'\xff'], dtype='|S2') - - > np.array([b"\xff\x00"], dtype=object) - array([b'\xff\x00'], dtype=object) - - For ease of use, for encoded string values, users might use `bytes` dtype. - -## Throwing Unrecoverable errors - -When the model gets into a state where further inference is impossible, -you can throw [PyTritonUnrecoverableError][pytriton.exceptions.PyTritonUnrecoverableError] -from the inference callable. This will cause NVIDIA Triton Inference Server to shut down. -This might be useful when the model is deployed on a cluster in a multi-node setup. In that case -to recover the model you need to restart all "workers" on the cluster. - -When the model gets into a state where further inference is impossible, -you can throw the [PyTritonUnrecoverableError][pytriton.exceptions.PyTritonUnrecoverableError] -from the inference callable. This will cause the NVIDIA Triton Inference Server to shut down. -This might be useful when the model is deployed on a cluster in a multi-node setup. In that case, -to recover the model, you need to restart all "workers" on the cluster. - -```python -from typing import Dict -import numpy as np -from pytriton.decorators import batch -from pytriton.exceptions import PyTritonUnrecoverableError - - -@batch -def infer_fn(**inputs: np.ndarray) -> Dict[str, np.ndarray]: - ... - - try: - outputs = model(**inputs) - except Exception as e: - raise PyTritonUnrecoverableError( - "Some unrecoverable error occurred, " - "thus no further inferences are possible." - ) from e - - ... 
- return outputs -``` \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/docs/building.md b/stf/stf-api-alternative/pytriton/docs/building.md deleted file mode 100644 index 31bf7838a195bf4bb7b8fd25fd07a989d60ec4ea..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/building.md +++ /dev/null @@ -1,85 +0,0 @@ - - -# Building binary package from source - -This guide provides an outline of the process for building the PyTriton binary package from source. -It offers the flexibility to modify the PyTriton code and integrate it with various versions -of the Triton Inference Server, including custom builds. -Additionally, it allows you to incorporate hotfixes that have not yet been officially released. - -## Prerequisites - -Before building the PyTriton binary package, ensure the following: - -- Docker with [buildx plugin](https://github.com/docker/buildx) is installed on the system. For more information, refer to the Docker documentation. -- Access to the Docker daemon is available from the system or container. - -If you plan to build an `arm64` wheel on an `amd64` machine, we suggest using QEMU for emulation. -To enable QEMU on Ubuntu you need to: -- Install the QEMU packages on your x86 machine: -```shell -sudo apt-get install qemu binfmt-support qemu-user-static -``` -- Register the QEMU emulators for ARM architectures: -```shell -docker run --rm --privileged multiarch/qemu-user-static --reset -p yes -``` - -## Building PyTriton binary package - -To build the wheel binary package, follow these steps from the root directory of the project: - -```shell -make install-dev -make dist -``` - -*Note*: The default build creates a wheel for the `x86_64` architecture. If you would like to build the wheel for `aarch64`, use -```shell -make dist -e PLATFORM=linux/arm64 -``` -We use the Docker naming convention for platforms. The supported options are `linux/amd64` and `linux/arm64`. - -The wheel package will be located in the `dist` directory. To install the library, run the following `pip` command: - -```shell -pip install dist/nvidia_pytriton-*-py3-none-*.whl -``` - -*Note*: The wheel name will contain `x86_64` or `aarch64`, based on the selected platform. - -## Building for a specific Triton Inference Server version - -Building for an unsupported OS or hardware platform is possible. -PyTriton requires a Python backend and either an HTTP or gRPC endpoint. -The build can be CPU-only, as inference is performed on Inference Handlers. - -For more information on the Triton Inference Server build process, refer to the -[building section of Triton Inference Server documentation](https://github.com/triton-inference-server/server/blob/main/docs/customization_guide/build.md). - -!!! warning "Untested Build" - - The Triton Inference Server has only been rigorously tested on Ubuntu 20.04. Other OS and hardware platforms are not - officially supported. You can test the build by following the steps outlined in the - [Triton Inference Server testing guide](https://github.com/triton-inference-server/server/blob/main/docs/customization_guide/test.md).
- -By the following [docker method steps](https://github.com/triton-inference-server/server/blob/main/docs/customization_guide/build.md#building-with-docker) -you can create a `tritonserver:latest` Docker image that can be used to build PyTriton with the following command: - -```shell -make dist -e TRITONSERVER_IMAGE_VERSION=latest -e TRITONSERVER_IMAGE_NAME=tritonserver:latest -``` diff --git a/stf/stf-api-alternative/pytriton/docs/chunking_guide.md b/stf/stf-api-alternative/pytriton/docs/chunking_guide.md deleted file mode 100644 index c900b38c787f77c75aaf5d533ac206b96d2f068d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/chunking_guide.md +++ /dev/null @@ -1,187 +0,0 @@ - - -# How to use PyTriton client to split a large input into smaller batches and send them to the server in parallel - -In this article, you will learn how to use PyTriton clients to create a chunking client that can handle inputs that are larger than the maximum batch size of your model. - -First, you need to create a model that can process a batch of inputs and produce a batch of outputs. For simplicity, let's assume that your model can only handle two inputs at a time. We will call this model "Batch2" and run it on a local Triton server. - -Next, you need to create a client that can send requests to your model. In this article, we will use the FuturesModelClient, which returns a Future object for each request. A Future object is a placeholder that can be used to get the result or check the status of the request later. - -However, there is a problem with using the FuturesModelClient directly. If you try to send an input that is larger than the maximum batch size of your model, you will get an error. For example, the following code tries to send an input of size 4 to the "Batch2" model, which has a maximum batch size of 2: - - - - - -```python -import numpy as np -from pytriton.client import FuturesModelClient - -with FuturesModelClient(f"localhost", "Batch2") as client: - input_tensor = np.zeros((4, 1), dtype=np.int32) - print(client.infer_batch(input_tensor).result()) -``` - -This code will raise an exception like this: - -``` -PyTritonClientInferenceServerError: Error occurred during inference request. Message: [request id: 0] inference request batch-size must be <= 2 for 'Batch2' -``` - -To solve this problem, we can use a ChunkingClient class that inherits from FuturesModelClient and overrides the infer_batch method. The ChunkingClient class takes a chunking strategy as an argument, which is a function that takes the input dictionary and the maximum batch size as parameters and yields smaller dictionaries of inputs. The default chunking strategy simply splits the input along the first dimension according to the maximum batch size. For example, if the input is `{"INPUT_1": np.zeros((5, 1), dtype=np.int32)}` and the maximum batch size is 2, then the default chunking strategy will yield: - -``` -{"INPUT_1": np.zeros((2, 1), dtype=np.int32)} -{"INPUT_1": np.zeros((2, 1), dtype=np.int32)} -{"INPUT_1": np.zeros((1, 1), dtype=np.int32)} -``` - -You can also define your own chunking strategy if you have more complex logic for splitting your input. 
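A custom strategy is simply a generator with the same signature as the default one: it receives the dictionary of inputs and the maximum batch size, and yields smaller input dictionaries. For illustration (the function name below is made up for this example and is not part of PyTriton), a strategy that always sends one sample per request could look like this:

```python
def one_sample_chunking_strategy(kwargs, max_batch_size):
    # Illustrative custom strategy: ignore max_batch_size and yield one sample per chunk
    batch_size = next(iter(kwargs.values())).shape[0]
    for i in range(batch_size):
        yield {key: value[i:i + 1] for key, value in kwargs.items()}
```

Such a strategy can be passed to the `ChunkingClient` defined below, for example `ChunkingClient("localhost", "Batch2", chunking_strategy=one_sample_chunking_strategy)`.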
- - - - - - - - - -```python -# Define a ChunkingClient class that inherits from FuturesModelClient and splits the input into smaller batches -import numpy as np -from pytriton.client import FuturesModelClient - -class ChunkingClient(FuturesModelClient): - def __init__(self, host, model_name, chunking_strategy=None, max_workers=None): - super().__init__(host, model_name, max_workers=max_workers) - self.chunking_strategy = chunking_strategy or self.default_chunking_strategy - - def default_chunking_strategy(self, kwargs, max_batch_size): - # Split the input by the first dimension according to the max batch size - size_of_dimension_0 = self.find_size_0(kwargs) - for i in range(0, size_of_dimension_0, max_batch_size): - yield {key: value[i:i+max_batch_size] for key, value in kwargs.items()} - - def find_size_0(self, kwargs): - # Check the size of the first dimension of each tensor and raise errors if they are not consistent or valid - size_of_dimension_0 = None - for key, value in kwargs.items(): - if isinstance(value, np.ndarray): - if value.ndim > 0: - size = value.shape[0] - if size_of_dimension_0 is None or size_of_dimension_0 == size: - size_of_dimension_0 = size - else: - raise ValueError("The tensors have different sizes at the first dimension") - else: - raise ValueError("The tensor has no first dimension") - else: - raise TypeError("The value is not a numpy tensor") - return size_of_dimension_0 - - def infer_batch(self, *args, **kwargs): - max_batch_size = self.model_config().result().max_batch_size - # Send the smaller batches to the server in parallel and yield the futures with results - futures = [super(ChunkingClient, self).infer_batch(*args, **chunk) for chunk in self.chunking_strategy(kwargs, max_batch_size)] - for future in futures: - yield future -``` -To use the ChunkingClient class, you can create an instance of it and use it in a context manager. For example: - - - -```python -# Use the ChunkingClient class with the default strategy to send an input of size 5 to the "Batch2" model -import numpy as np -chunker_client = ChunkingClient("localhost", "Batch2") -results = [] -with chunker_client as client: - input_tensor = np.zeros((5, 1), dtype=np.int32) - # Collect the result of each future without concatenating them - for future in client.infer_batch(INPUT_1=input_tensor): - results.append(future.result()) -print(results) -``` - - - - -This code will print: - - -``` -{'OUTPUT_1': array([[0], - [0]], dtype=int32)} -{'OUTPUT_1': array([[0], - [0]], dtype=int32)} -{'OUTPUT_1': array([[0]], dtype=int32)} -``` - -You can see that the input is split into three batches of sizes 2, 2, and 1, and each batch is sent to the server in parallel. The results are returned as futures that can be accessed individually without concatenating them. \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/docs/clients.md b/stf/stf-api-alternative/pytriton/docs/clients.md deleted file mode 100644 index a17cf3acf95f30b2f915f09ee1f2721ffd9cd6d3..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/clients.md +++ /dev/null @@ -1,416 +0,0 @@ - - - -# Triton clients - -The prerequisite for this page is to install PyTriton. You also need the ```Linear``` model described in quick_start. You should run it so the client can connect to it. - -The clients section presents how to send requests to the Triton Inference Server using the PyTriton library.
- -## ModelClient - -ModelClient is a simple client that can perform inference requests synchronously. You can use ModelClient to communicate with the deployed model using HTTP or gRPC protocol. You can specify the protocol when creating the ModelClient object. - -For example, you can use ModelClient to send requests to a PyTorch model that performs linear regression: - - - - - - - - - -```python -import torch -from pytriton.client import ModelClient - -# Create some input data as a numpy array -input1_data = torch.randn(128, 2).cpu().detach().numpy() - -# Create a ModelClient object with the server address and model name -client = ModelClient("localhost:8000", "Linear") -# Call the infer_batch method with the input data -result_dict = client.infer_batch(input1_data) -# Close the client to release the resources -client.close() - -# Print the result dictionary -print(result_dict) -``` - - - - - -You can also use ModelClient to send requests to a model that performs image classification. The example assumes that a model takes in an image and returns the top 5 predicted classes. This model is not included in the PyTriton library. - -You need to convert the image to a numpy array and resize it to the expected input shape. You can use Pillow package to do this. - - -```python -import numpy as np -from PIL import Image -from pytriton.client import ModelClient - -# Create some input data as a numpy array of an image -img = Image.open("cat.jpg") -img = img.resize((224, 224)) -input_data = np.array(img) - -# Create a ModelClient object with the server address and model name -client = ModelClient("localhost:8000", "ImageNet") -# Call the infer_sample method with the input data -result_dict = client.infer_sample(input_data) -# Close the client to release the resources -client.close() - -# Print the result dictionary -print(result_dict) -``` - -You need to install Pillow package to run the above example: -```bash -pip install Pillow -``` - -## FuturesModelClient - -FuturesModelClient is a concurrent.futures based client that can perform inference requests in a parallel way. You can use FuturesModelClient to communicate with the deployed model using HTTP or gRPC protocol. You can specify the protocol when creating the FuturesModelClient object. - -For example, you can use FuturesModelClient to send multiple requests to a text generation model that takes in text prompts and returns generated texts. The TextGen model is not included in the PyTriton library. The example assumes that the model returns a single output tensor with the generated text. The example also assumes that the model takes in a list of text prompts and returns a list of generated texts. - -You need to convert the text prompts to numpy arrays of bytes using a tokenizer from transformers. 
You also need to detokenize the output texts using the same tokenizer: - - -```python -import numpy as np -from pytriton.client import FuturesModelClient -from transformers import AutoTokenizer - -# Create some input data as a list of text prompts -input_data_list_text = ["Write a haiku about winter.", "Summarize the article below in one sentence.", "Generate a catchy slogan for PyTriton."] - -# Create a tokenizer from transformers -tokenizer = AutoTokenizer.from_pretrained("gpt2") - -# Convert the text prompts to numpy arrays of bytes using the tokenizer -input_data_list = [np.array(tokenizer.encode(prompt)) for prompt in input_data_list_text] - -# Create a FuturesModelClient object with the server address and model name -with FuturesModelClient("localhost:8000", "TextGen") as client: - # Call the infer_sample method for each input data in the list and store the returned futures - output_data_futures = [client.infer_sample(input_data) for input_data in input_data_list] - # Wait for all the futures to complete and get the results - output_data_list = [output_data_future.result() for output_data_future in output_data_futures] - -# Print tokens -print(output_data_list) - -# Detokenize the output texts using the tokenizer and print them -output_texts = [tokenizer.decode(output_data["OUTPUT_1"]) for output_data in output_data_list] -for output_text in output_texts: - print(output_text) -``` - -You need to install transformers package to run the above example: -```bash -pip install transformers -``` - -You can also use FuturesModelClient to send multiple requests to an image classification model that takes in image data and returns class labels or probabilities. The ImageNet model is described above. - -In this case, you can use the infer_batch method to send a batch of images as input and get a batch of outputs. You need to stack the images along the first dimension to form a batch. You can also print the class names corresponding to the output labels: - - -``` python -import numpy as np -from PIL import Image -from pytriton.client import FuturesModelClient - -# Create some input data as a list of lists of image arrays -input_data_list = [] -for batch in [["cat.jpg", "dog.jpg", "bird.jpg"], ["car.jpg", "bike.jpg", "bus.jpg"], ["apple.jpg", "banana.jpg", "orange.jpg"]]: - batch_data = [] - for filename in batch: - img = Image.open(filename) - img = img.resize((224, 224)) - img = np.array(img) - batch_data.append(img) - # Stack the images along the first dimension to form a batch - batch_data = np.stack(batch_data, axis=0) - input_data_list.append(batch_data) - -# Create a list of class names for ImageNet -class_names = ["tench", "goldfish", "great white shark", ...] 
- -# Create a FuturesModelClient object with the server address and model name -with FuturesModelClient("localhost:8000", "ImageNet") as client: - # Call the infer_batch method for each input data in the list and store the returned futures - output_data_futures = [client.infer_batch(input_data) for input_data in input_data_list] - # Wait for all the futures to complete and get the results - output_data_list = [output_data_future.result() for output_data_future in output_data_futures] - -# Print the list of result dictionaries -print(output_data_list) - -# Print the class names corresponding to the output labels for each batch -for output_data in output_data_list: - output_labels = output_data["OUTPUT_1"] - for output_label in output_labels: - class_name = class_names[output_label] - print(f"The image is classified as {class_name}.") -``` - -## AsyncioModelClient - -AsyncioModelClient is an asynchronous client that can perform inference requests using the asyncio library. You can use AsyncioModelClient to communicate with the deployed model using HTTP or gRPC protocol. You can specify the protocol when creating the AsyncioModelClient object. - -For example, you can use AsyncioModelClient to send requests to a PyTorch model that performs linear regression: - - -```python -import torch -from pytriton.client import AsyncioModelClient - -# Create some input data as a numpy array -input1_data = torch.randn(2).cpu().detach().numpy() - -# Create an AsyncioModelClient object with the server address and model name -client = AsyncioModelClient("localhost:8000", "Linear") -# Call the infer_sample method with the input data -result_dict = await client.infer_sample(input1_data) -# Close the client to release the resources -client.close() - -# Print the result dictionary -print(result_dict) -``` - -You can also use FastAPI to create a web application that exposes the results of inference at an HTTP endpoint. FastAPI is a modern, fast, web framework for building APIs with Python 3.6+ based on standard Python type hints. - -To use FastAPI, you need to install it with: - -```bash -pip install fastapi -``` - -You also need an ASGI server, for production such as Uvicorn or Hypercorn. - -To install Uvicorn, run: - -```bash -pip install uvicorn[standard] -``` - -The `uvicorn` uses port `8000` as default for web server. Triton server default port is also `8000` for HTTP protocol. You can change uvicorn port by using `--port` option. PyTriton also supports custom ports configuration for Triton server. The class `TritonConfig` contains parameters for ports configuration. You can pass it to `Triton` during initialization: - - -```python -config = TritonConfig(http_port=8015) -triton_server = Triton(config=config) -``` - -You can use this `triton_server` object to bind your inference model and run HTTP endpoint from Triton Inference Server at port `8015`. 
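Putting these pieces together, a minimal sketch of a server with a custom HTTP port might look as follows. It assumes the `Linear` model and `infer_fn` from the quick start; the port value is only an example.

```python
import numpy as np
from pytriton.model_config import ModelConfig, Tensor
from pytriton.triton import Triton, TritonConfig

# infer_fn is assumed to be the Linear inference callable from the quick start
config = TritonConfig(http_port=8015)
with Triton(config=config) as triton_server:
    triton_server.bind(
        model_name="Linear",
        infer_func=infer_fn,
        inputs=[Tensor(dtype=np.float32, shape=(-1,))],
        outputs=[Tensor(dtype=np.float32, shape=(-1,))],
        config=ModelConfig(max_batch_size=128),
    )
    triton_server.serve()  # HTTP endpoint now listens on port 8015
```

Note that the FastAPI example below keeps Triton on its default port `8000` and moves uvicorn to port `8015` instead; either split of ports works as long as the two servers do not collide.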
- - -Then you can create a FastAPI app that uses the AsyncioModelClient to perform inference and return the results as JSON: - - -```python -from fastapi import FastAPI -import torch -from pytriton.client import AsyncioModelClient - -# Create an AsyncioModelClient object with the server address and model name -config_client = AsyncioModelClient("localhost:8000", "Linear") - -app = FastAPI() - -@app.get("/predict") -async def predict(): - # Create some input data as a numpy array - input1_data = torch.randn(2).cpu().detach().numpy() - - # Create an AsyncioModelClient object from existing client to avoid pulling config from server - async with AsyncioModelClient.from_existing_client(config_client) as request_client: - # Call the infer_sample method with the input data - result_dict = await request_client.infer_sample(input1_data) - - # Return the result dictionary as JSON - return result_dict - -@app.on_event("shutdown") -async def shutdown(): - # Close the client to release the resources - await config_client.close() -``` - -Save this file as `main.py`. - -To run the app, use the command: - - -```bash -uvicorn main:app --reload --port 8015 -``` - -You can then access the endpoint at `http://127.0.0.1:8015/predict` and see the JSON response. - -You can also check the interactive API documentation at `http://127.0.0.1:8015/docs`. - -You can test your server using curl: - - -```bash -curl -X 'GET' \ - 'http://127.0.0.1:8015/predict' \ - -H 'accept: application/json' -``` - -Command will print three random numbers: - -```python -[-0.2608422636985779,-0.6435106992721558,-0.3492531180381775] -``` - -For more information about FastAPI and Uvicorn, check out these links: - -- [FastAPI documentation](https://fastapi.tiangolo.com/) -- [Uvicorn documentation](https://www.uvicorn.org/) - - -## Client timeouts - -When creating a [ModelClient][pytriton.client.client.ModelClient] or [FuturesModelClient][pytriton.client.client.FuturesModelClient] object, you can specify the timeout for waiting until the server and model are ready using the `init_timeout_s` parameter. By default, the timeout is set to 5 minutes (300 seconds). - -Example usage: - - -```python -import numpy as np -from pytriton.client import ModelClient, FuturesModelClient - -input1_data = np.random.randn(128, 2) -with ModelClient("localhost", "MyModel", init_timeout_s=120) as client: - # Raises PyTritonClientTimeoutError if the server or model is not ready within the specified timeout - result_dict = client.infer_batch(input1_data) - - -with FuturesModelClient("localhost", "MyModel", init_timeout_s=120) as client: - future = client.infer_batch(input1_data) - ... 
- # It will raise `PyTritonClientTimeoutError` if the server is not ready and the model is not loaded within 120 seconds - # from the time `infer_batch` was called by a thread from `ThreadPoolExecutor` - result_dict = future.result() -``` - -You can disable the default behavior of waiting for the server and model to be ready during first inference request by setting `lazy_init` to `False`: - - -```python -import numpy as np -from pytriton.client import ModelClient, FuturesModelClient - -input1_data = np.random.randn(128, 2) - -# will raise PyTritonClientTimeoutError if server is not ready and model loaded -# within 120 seconds during intialization of client -with ModelClient("localhost", "MyModel", init_timeout_s=120, lazy_init=False) as client: - result_dict = client.infer_batch(input1_data) -``` - -You can specify the timeout for the client to wait for the inference response from the server. -The default timeout is 60 seconds. You can specify the timeout when creating the [ModelClient][pytriton.client.client.ModelClient] or [FuturesModelClient][pytriton.client.client.FuturesModelClient] object: - - -```python -import numpy as np -from pytriton.client import ModelClient, FuturesModelClient - -input1_data = np.random.randn(128, 2) -with ModelClient("localhost", "MyModel", inference_timeout_s=240) as client: - # Raises `PyTritonClientTimeoutError` if the server does not respond to inference request within 240 seconds - result_dict = client.infer_batch(input1_data) - - -with FuturesModelClient("localhost", "MyModel", inference_timeout_s=240) as client: - future = client.infer_batch(input1_data) - ... - # Raises `PyTritonClientTimeoutError` if the server does not respond within 240 seconds - # from the time `infer_batch` was called by a thread from `ThreadPoolExecutor` - result_dict = future.result() -``` - -!!! warning "gRPC client timeout not fully supported" - - There are some missing features in the gRPC client that prevent it from working correctly with timeouts - used during the wait for the server and model to be ready. This may cause the client to hang if the server - doesn't respond with the current server or model state. - -!!! info "Server side timeout not implemented" - - Currently, there is no support for server-side timeout. The server will continue to process the request even if the client timeout is reached. diff --git a/stf/stf-api-alternative/pytriton/docs/custom_params.md b/stf/stf-api-alternative/pytriton/docs/custom_params.md deleted file mode 100644 index ae64ecc96360d5a030407cab5c4785c19128e285..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/custom_params.md +++ /dev/null @@ -1,101 +0,0 @@ - - -# Custom HTTP/gRPC headers and parameters - -This document provides guidelines for using custom HTTP/gRPC headers and parameters with PyTriton. -Original Triton documentation related to parameters can be found [here](https://github.com/triton-inference-server/server/blob/main/docs/protocol/extension_parameters.md). -Now, undecorated inference function accepts list of Request instances. -Request class contains following fields: -- data - for inputs (stored as dictionary, but can be also accessed with request dict interface e.g. request["input_name"]) -- parameters - for combined parameters and HTTP/gRPC headers - -!!! warning "Parameters/headers usage limitations" - - Currently, custom parameters and headers can be only accessed in undecorated inference function (they don't work with decorators). 
- There is separate example how to use parameters/headers in preprocessing step (see [here](downloaded_input_data.md)) - - -## Parameters - -Parameters are passed to the inference callable as a dictionary. -The dictionary is stored in HTTP/gRPC request body payload. - -## HTTP/gRPC headers - -Custom HTTP/gRPC headers are passed to the inference callable in the same dictionary as parameters, -but they are stored in HTTP/gRPC request headers instead of the request body payload. -For the headers it is also necessary to specify the header prefix in Triton config, which is used to distinguish the custom -headers from standard ones (only headers with specified prefix are passed to the inference callable). - -## Usage - -1. Define inference callable (that one uses one parameter and one header): - -```python -import numpy as np -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -def _infer_with_params_and_headers(requests): - responses = [] - for req in requests: - a_batch, b_batch = req.values() - scaled_add_batch = (a_batch + b_batch) / float(req.parameters["header_divisor"]) - scaled_sub_batch = (a_batch - b_batch) * float(req.parameters["parameter_multiplier"]) - responses.append({"scaled_add": scaled_add_batch, "scaled_sub": scaled_sub_batch}) - return responses -``` - -2. Bind inference callable to Triton ("header" is the prefix for custom headers): - - -```python -with Triton(config=TritonConfig(http_header_forward_pattern="header.*")) as triton: - triton.bind( - model_name="ParamsAndHeaders", - infer_func=_infer_with_params_and_headers, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="scaled_add", dtype=np.float32, shape=(-1,)), - Tensor(name="scaled_sub", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - ) - - triton.serve() -``` - -3. Call the model using ModelClient: - - - -```python -import numpy as np -from pytriton.client import ModelClient - -batch_size = 2 -a_batch = np.ones((batch_size, 1), dtype=np.float32) * 2 -b_batch = np.ones((batch_size, 1), dtype=np.float32) -``` - -```python -with ModelClient("localhost", "ParamsAndHeaders") as client: - result_batch = client.infer_batch(a_batch, b_batch, parameters={"parameter_multiplier": 2}, headers={"header_divisor": 3}) -``` diff --git a/stf/stf-api-alternative/pytriton/docs/decorators.md b/stf/stf-api-alternative/pytriton/docs/decorators.md deleted file mode 100644 index 0d7fbb5407fa0190341d66644c63493031890323..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/decorators.md +++ /dev/null @@ -1,290 +0,0 @@ - - -# Decorators - -The PyTriton provide decorators for operations on input requests to simplify passing the requests to the model inputs. -We have prepared several useful decorators for converting generic request input into common user needs. You can create -custom decorators tailored to your requirements and chain them with other decorators. - -## Batch - -In many cases, it is more convenient to receive input already batched in the form of a NumPy array instead of a list of -separate requests. For such cases, we have prepared the `@batch` decorator that adapts generic input into a batched -form. It passes kwargs to the inference function where each named input contains a NumPy array with a batch of requests -received by the Triton server. 
- -Below, we show the difference between decorated and undecorated functions bound with Triton: - -```python -import numpy as np -from pytriton.decorators import batch -from pytriton.proxy.types import Request - -# Sample input data with 2 requests - each with 2 inputs -input_data = [ - Request({'in1': np.array([[1, 1]]), 'in2': np.array([[2, 2]])}), - Request({'in1': np.array([[1, 2]]), 'in2': np.array([[2, 3]])}) -] - - -def undecorated_identity_fn(requests): - print(requests) - # As expected, requests = [ - # Request({'in1': np.array([[1, 1]]), 'in2': np.array([[2, 2]])}), - # Request({'in1': np.array([[1, 2]]), 'in2': np.array([[2, 3]])}), - # ] - results = requests - return results - - -@batch -def decorated_identity_fn(in1, in2): - print(in1, in2) - # in1 = np.array([[1, 1], [1, 2]]) - # in2 = np.array([[2, 2], [2, 3]]) - # Inputs are batched by `@batch` decorator and passed to the function as kwargs, so they can be automatically mapped - # with in1, in2 function parameters - # Of course, we could get these params explicitly with **kwargs like this: - # def decorated_infer_fn(**kwargs): - return {"out1": in1, "out2": in2} - - -undecorated_identity_fn(input_data) -decorated_identity_fn(input_data) -``` - -More examples using the `@batch` decorator with different frameworks are shown below. - -Example implementation for TensorFlow model: - -```python -import numpy as np -import tensorflow as tf - -from pytriton.decorators import batch - - -@batch -def infer_tf_fn(**inputs: np.ndarray): - (images_batch,) = inputs.values() - images_batch_tensor = tf.convert_to_tensor(images_batch) - output1_batch = model.predict(images_batch_tensor) - return [output1_batch] -``` - -Example implementation for PyTorch model: - -```python -import numpy as np -import torch - -from pytriton.decorators import batch - - -@batch -def infer_pt_fn(**inputs: np.ndarray): - (input1_batch,) = inputs.values() - input1_batch_tensor = torch.from_numpy(input1_batch).to("cuda") - output1_batch_tensor = model(input1_batch_tensor) - output1_batch = output1_batch_tensor.cpu().detach().numpy() - return [output1_batch] - -``` - -Example implementation with [named inputs and outputs](binding_models.md#defining-inputs-and-outputs): - -```python -import numpy as np -from pytriton.decorators import batch - - -@batch -def add_subtract_fn(a: np.ndarray, b: np.ndarray): - return {"add": a + b, "sub": a - b} - - -@batch -def multiply_fn(**inputs: np.ndarray): - a = inputs["a"] - b = inputs["b"] - return [a * b] -``` - -Example implementation with strings: - -```python -import numpy as np -from transformers import pipeline -from pytriton.decorators import batch - -CLASSIFIER = pipeline("zero-shot-classification", model="facebook/bart-base", device=0) - - -@batch -def classify_text_fn(text_array: np.ndarray): - text = text_array[0] # text_array contains one string at index 0 - text = text.decode("utf-8") # string is stored in byte array encoded in utf-8 - result = CLASSIFIER(text) - return [np.array(result)] # return statistics generated by classifier -``` - -## Sample - -`@sample` - takes the first request and converts it into named inputs. This decorator is useful with non-batching -models. Instead of a one-element list of requests, we get named inputs - `kwargs`. - -```python -from pytriton.decorators import sample - - -@sample -def infer_fn(sequence): - pass -``` - -## Group by keys - -`@group_by_keys` - groups requests with the same set of keys and calls the wrapped function for each group separately. 
-This decorator is -convenient to use before batching because the batching decorator requires a consistent set of inputs as it stacks them -into batches. - -```python -from pytriton.decorators import batch, group_by_keys - - -@group_by_keys -@batch -def infer_fn(mandatory_input, optional_input=None): - # perform inference - pass -``` - -## Group by values - -`@group_by_values(*keys)` - groups requests with the same input value (for selected keys) and calls the wrapped function -for each group separately. This decorator is particularly useful with models requiring dynamic parameters sent by users, -such as temperature. In this case, we want to run the model only for requests with the same temperature value. - -```python -from pytriton.decorators import batch, group_by_values - - -@batch -@group_by_values('temperature') -def infer_fn(mandatory_input, temperature): - # perform inference - pass -``` - -## Fill optionals - -`@fill_optionals(**defaults)` - fills missing inputs in requests with default values provided by the user. If model -owners have default values for some optional parameters, it's a good idea to provide them at the beginning, so other -decorators can create larger consistent groups and send them to the inference callable. - -```python -import numpy as np -from pytriton.decorators import batch, fill_optionals, group_by_values - - -@fill_optionals(temperature=np.array([0.7])) -@batch -@group_by_values('temperature') -def infer_fn(mandatory_input, temperature): - # perform inference - pass -``` - -`` - -## Pad batch - -`@pad_batch` - appends the last row to the input multiple times to achieve the desired batch size (preferred batch -size or max batch size from the model config, whichever is closer to the current input size). - -```python -from pytriton.decorators import batch, pad_batch - -@batch -@pad_batch -def infer_fn(mandatory_input): - # this model requires mandatory_input batch to be the size provided in the model config - pass -``` - -## First value - -`@first_value` - this decorator takes the first elements from batches for selected inputs specified by the `keys` -parameter. -If the value is a one-element array, it is converted to a scalar value. -This decorator is convenient to use with dynamic model parameters that users send in requests. -You can use `@group_by_values` before to have batches with the same values in each batch. - -```python -import numpy as np -from pytriton.decorators import batch, fill_optionals, first_value, group_by_values - -@fill_optionals(temperature=np.array([0.7])) -@batch -@group_by_values('temperature') -@first_value('temperature') -def infer_fn(mandatory_input, temperature): - # perform inference with scalar temperature=10 - pass -``` - -## Triton context - -The `@triton_context` decorator provides an additional argument called `triton_context`, -from which you can read the model config. - - ```python - from pytriton.decorators import triton_context - - -@triton_context -def infer_fn(input_list, **kwargs): - model_config = kwargs['triton_context'].model_config - # perform inference using some information from model_config - pass - ``` - -## Stacking multiple decorators - -Here is an example of stacking multiple decorators together. -We recommend starting with type 1 decorators, followed by types 2 and 3. -Place the `@triton_context` decorator last in the chain. 
- -```python -import numpy as np -from pytriton.decorators import batch, fill_optionals, first_value, group_by_keys, group_by_values, triton_context - - -@fill_optionals(temperature=np.array([0.7])) -@group_by_keys -@batch -@group_by_values('temperature') -@first_value('temperature') -@triton_context -def infer(triton_context, mandatory_input, temperature, opt1=None, opt2=None): - model_config = triton_context.model_config - # perform inference using: - # - some information from model_config - # - scalar temperature value - # - optional parameters opt1 and/or opt2 -``` diff --git a/stf/stf-api-alternative/pytriton/docs/deploying_in_clusters.md b/stf/stf-api-alternative/pytriton/docs/deploying_in_clusters.md deleted file mode 100644 index eb1ea59fc502cf4f09a018456788842d6e0ae276..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/deploying_in_clusters.md +++ /dev/null @@ -1,101 +0,0 @@ - - -# Deploying in Cluster - -The library can be used inside containers and deployed on Kubernetes clusters. There are certain prerequisites and -information that would help deploy the library in your cluster. - -## Health checks - -The library uses the Triton Inference Server to handle HTTP/gRPC requests. Triton Server provides endpoints to validate if -the server is ready and in a healthy state. The following API endpoints can be used in your orchestrator to -control the application ready and live states: - -- Ready: `/v2/health/ready` -- Live: `/v2/health/live` - -## Exposing ports - -The library uses the Triton Inference Server, which exposes the HTTP, gRPC, and metrics ports for communication. In the default -configuration, the following ports have to be exposed: - -- 8000 for HTTP -- 8001 for gRPC -- 8002 for metrics - -If the library is inside a Docker container, the ports can be exposed by passing an extra argument to the `docker run` -command. An example of passing ports configuration: - - - -```shell -docker run -p 8000:8000 -p 8001:8001 -p 8002:8002 {image} -``` - -To deploy a container in Kubernetes, add a ports definition for the container in YAML deployment configuration: - -```yaml -containers: - - name: pytriton - ... - ports: - - containerPort: 8000 - name: http - - containerPort: 8001 - name: grpc - - containerPort: 8002 - name: metrics -``` - -## Configuring shared memory - -The connection between Python callbacks and the Triton Inference Server uses shared memory to pass data between the -processes. In the Docker container, the default amount of shared memory is `64MB`, which may not be enough to pass input and -output data of the model. The PyTriton initialize `16MB` of shared memory for `Proxy Backend` at start to pass -input/output tensors between processes. The additional memory is allocated dynamically. In case of failure, the size -of available shared memory might need to be increased. - -To increase the available shared memory size, pass an additional flag to the `docker run` command. -An example of increasing the shared memory size to 8GB: - - - -```shell -docker run --shm-size 8GB {image} -``` -To increase the shared memory size for Kubernetes, the following configuration can be used: - -```yaml -spec: - volumes: - - name: shared-memory - emptyDir: - medium: Memory - containers: - - name: pytriton - ... 
- volumeMounts: - - mountPath: /dev/shm - name: shared-memory -``` - -## Specify container init process - -You can use the [`--init` flag](https://docs.docker.com/engine/reference/run/#specify-an-init-process) of the `docker run` -command to indicate that an init process should be used as the PID 1 in the container. -Specifying an init process ensures that zombie processes are reaped inside the container. Reaping zombie -processes is important in case an unexpected error occurs in the scripts hosting PyTriton. \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/docs/downloaded_input_data.md b/stf/stf-api-alternative/pytriton/docs/downloaded_input_data.md deleted file mode 100644 index a5d414fe90dc26aa737af3cc6781b379e5f47108..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/downloaded_input_data.md +++ /dev/null @@ -1,55 +0,0 @@ - - -# Example with downloaded input data - -In the following example, we will demonstrate how to effectively utilize PyTriton with downloaded input data. -While the model itself does not define any inputs, it utilizes custom parameters or headers to extract a URL and download data from an external source, such as an S3 bucket. - -The model inference function can leverage the batch decorator since it does not rely on any parameters or headers. - -## Example - - -```python -import numpy as np -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -@batch -def model_infer_function(**inputs): - ... - -def request_infer_function(requests): - for request in requests: - image_url = request.parameters["custom_url"] - image_jpeg = download(image_url)  # user-provided helper for fetching the data - image_data = decompress(image_jpeg)  # user-provided helper for decoding the data - request['images_data'] = image_data - outputs = model_infer_function(requests) - return outputs - -with Triton(config=TritonConfig(http_header_forward_pattern="custom.*")) as triton: - triton.bind( - model_name="ImgModel", - infer_func=request_infer_function, - inputs=[], - outputs=[Tensor(name="out", dtype=np.float32, shape=(-1,))], - config=ModelConfig(max_batch_size=128), - ) - triton.serve() -``` - diff --git a/stf/stf-api-alternative/pytriton/docs/inference_callable.md b/stf/stf-api-alternative/pytriton/docs/inference_callable.md deleted file mode 100644 index 80586c0086e637e387abc99e20b2885b9ef7dcba..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/inference_callable.md +++ /dev/null @@ -1,93 +0,0 @@ - - -# Inference Callable - -This document provides guidelines for creating an inference callable for PyTriton, which serves as the entry point for -handling inference requests. - -The interface of the inference callable -assumes it receives a list of requests with input dictionaries, where each dictionary represents one request mapping model input -names to NumPy ndarrays. -Requests also contain custom HTTP/gRPC headers and parameters in the parameters dictionary. - -## Function - -The simplest inference callable is a function that implements the interface for handling requests and responses. -The Request class contains the following fields: -- data - for inputs (stored as a dictionary, but can also be accessed with the request dict interface, e.g. request["input_name"]) -- parameters - for combined parameters and HTTP/gRPC headers -For more information about parameters and headers see [here](custom_params.md).
- - ```python - import numpy as np - from typing import Dict, List - from pytriton.proxy.types import Request - - def infer_fn(requests: List[Request]) -> List[Dict[str, np.ndarray]]: - ... - ``` - -## Class - -In many cases is worth to use an object of given class as callable. This is especially useful when you want to have a -control over the order of initialized objects or models. - - - - ```python - import numpy as np - from typing import Dict, List - from pytriton.proxy.types import Request - - class InferCallable: - - def __call__(self, requests: List[Request]) -> List[Dict[str, np.ndarray]]: - ... - ``` - -## Binding to Triton - -To use the inference callable with PyTriton, it must be bound to a Triton server instance using the `bind` method: - - - -```python -import numpy as np -from pytriton.triton import Triton -from pytriton.model_config import ModelConfig, Tensor - -with Triton() as triton: - triton.bind( - model_name="MyInferenceFn", - infer_func=infer_fn, - inputs=[Tensor(shape=(1,), dtype=np.float32)], - outputs=[Tensor(shape=(1,), dtype=np.float32)], - config=ModelConfig(max_batch_size=8) - ) - - infer_callable = InferCallable() - triton.bind( - model_name="MyInferenceCallable", - infer_func=infer_callable, - inputs=[Tensor(shape=(1,), dtype=np.float32)], - outputs=[Tensor(shape=(1,), dtype=np.float32)], - config=ModelConfig(max_batch_size=8) - ) -``` - -For more information on serving the inference callable, refer to -the [Loading models section](binding_models.md) on Deploying Models page. diff --git a/stf/stf-api-alternative/pytriton/docs/initialization.md b/stf/stf-api-alternative/pytriton/docs/initialization.md deleted file mode 100644 index 0194d25485daf7666a978807c458d3313dc45b7c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/initialization.md +++ /dev/null @@ -1,111 +0,0 @@ - - -# Initialization - -The following page provides more details about possible options for configuring the -[Triton Inference Server](https://github.com/triton-inference-server/server) and working with -block and non-blocking mode for tests and deployment. - -## Configuring Triton - -Connecting Python models with Triton Inference Server working in the current environment requires creating -a [Triton][pytriton.triton.Triton] object. This can be done by creating a context: - - -```python -from pytriton.triton import Triton - -with Triton() as triton: - ... -``` - -or simply creating an object: - - -```python -from pytriton.triton import Triton - -triton = Triton() -``` - -The Triton Inference Server behavior can be configured by passing [config][pytriton.triton.TritonConfig] parameter: - - -```python -import pathlib -from pytriton.triton import Triton, TritonConfig - -triton_config = TritonConfig(log_file=pathlib.Path("/tmp/triton.log")) -with Triton(config=triton_config) as triton: - ... -``` - -and through environment variables, for example, set as in the command below: - - - -```sh -PYTRITON_TRITON_CONFIG_LOG_VERBOSITY=4 python my_script.py -``` - -The order of precedence of configuration methods is: - -- config defined through `config` parameter of [Triton][pytriton.triton.Triton] class `__init__` method -- config defined in environment variables -- default [TritonConfig][pytriton.triton.TritonConfig] values - -## Blocking mode - -The blocking mode will stop the execution of the current thread and wait for incoming HTTP/gRPC requests for inference -execution. This mode makes your application behave as a pure server. 
An example of using blocking mode: - - -```python -from pytriton.triton import Triton - -with Triton() as triton: - ... # Load models here - triton.serve() -``` - -## Background mode - -The background mode runs Triton as a subprocess and does not block the execution of the current thread. In this mode, you can run -Triton Inference Server and interact with it from the current context. An example of using background mode: - -```python -from pytriton.triton import Triton - -triton = Triton() -... # Load models here -triton.run() # Triton Server started -print("This print will appear") -triton.stop() # Triton Server stopped -``` - -## Filesystem usage - -PyTriton needs to access the filesystem for two purposes: - - - to communicate with the Triton backend using file sockets, - - to store a copy of the Triton backend and its binary dependencies. - -PyTriton creates temporary folders called Workspaces, where it stores the file descriptors for these operations. By default, these folders are located in the `$HOME/.cache/pytriton` directory. However, you can change this location by setting the `PYTRITON_HOME` environment variable. - - - - diff --git a/stf/stf-api-alternative/pytriton/docs/installation.md b/stf/stf-api-alternative/pytriton/docs/installation.md deleted file mode 100644 index 92b06586a0fb9f1021f44e6a8c25e4fdea88fb89..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/installation.md +++ /dev/null @@ -1,190 +0,0 @@ - - -# Installation - -This page explains how to install the library. We assume that you have a basic understanding of the Python programming language -and know how to work with machine learning models. Using [Docker](https://www.docker.com/) is optional and not required. - -The library can be installed in any of the following ways: - -- system environment -- virtualenv -- [Docker](https://www.docker.com/) image - -If you opt for using Docker, you can get NVIDIA-optimized Docker images for Python frameworks from the [NVIDIA NGC Catalog](https://catalog.ngc.nvidia.com/containers). - -To run model inference on NVIDIA GPU using the Docker runtime, we recommend that you -install the [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html), which enables GPU acceleration for containers. - -## Prerequisites - -Before installing the library, ensure that you meet the following requirements: - -- An operating system with glibc >= `2.35`. Triton Inference Server and PyTriton have only been rigorously tested on Ubuntu 22.04. - Other supported operating systems include Debian 11+, Rocky Linux 9+, and Red Hat Universal Base Image 9+. - - to check your glibc version, run `ldd --version` -- Python version >= `3.8` -- `pip >= 20.3` -- `libpython3.*.so` available in the operating system (appropriate for the Python version). - -## Install from `pypi` - -You can install the package from [pypi.org](https://pypi.org/project/nvidia-pytriton/) by running the following command: - -```shell -pip install -U nvidia-pytriton -``` - -!!! note "Triton Inference Server binaries" - - The Triton Inference Server binaries are installed as part of the PyTriton package. - -## Setting Up Python Environment - -The Triton Inference Server is automatically run with your Python interpreter version.
To use Triton binary you need -to make sure that `libpython3.*.so` library can be linked during PyTriton start. Install and provide location to -`libpython3.*.so` library in LD_LIBRARY_PATH before you will run PyTriton. Below we presented some options on how -to prepare your Python environment to run PyTriton with common tools. - -### Upgrading `pip` version - -You need to have `pip` version 20.3 or higher. To upgrade an older version of pip, run this command: - -```shell -pip install -U pip -``` - -### Using system interpreter - -When you are running PyTriton on Ubuntu 22.04 install the desired Python interpreter and `libpython3*so.` library. -```shell -# Install necessary packages -apt update -y -apt install -y software-properties-common - -# Add repository with various Python versions -add-apt-repository ppa:deadsnakes/ppa -y - -# Install Python 3.8 -apt install -y python3.8 libpython3.8 python3.8-distutils python3-pip \ - build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev \ - libffi-dev curl libbz2-dev pkg-config make - -# Install library for interpreter -python3.8 -m pip install nvidia-pytriton -``` - -### Creating virtualenv using `pyenv` - -In order to install different version replace the `3.8` with desired Python version in the example below: - -```shell -# Install necessary packages -apt update -y -apt install -y python3 python3-distutils python-is-python3 git \ - build-essential libssl-dev zlib1g-dev \ - libbz2-dev libreadline-dev libsqlite3-dev curl \ - libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev - -# Install pyenv -curl https://pyenv.run | bash - -# Configure pyenv in current environment -export PYENV_ROOT="$HOME/.pyenv" -command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH" -eval "$(pyenv init -)" -eval "$(pyenv virtualenv-init -)" - -# Install Python 3.8 with shared library support -env PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install 3.8 - -# Create and activate virtualenv -pyenv virtualenv 3.8 venv -pyenv activate venv - -# export the library path -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pyenv virtualenv-prefix)/lib - -# Install library for interpreter -pip install nvidia-pytriton -``` - -### Creating virtualenv using `venv` - -In order to install different version replace the `3.8` with desired Python version in the example below: - -```shell -# Install necessary packages -apt update -y -apt install -y software-properties-common - -# Add repository with various Python versions -add-apt-repository ppa:deadsnakes/ppa -y - -# Install Python 3.8 -apt install -y python3.8 libpython3.8 python3.8-distutils python3.8-venv python3.8-pip python-is-python3 \ - build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev \ - libffi-dev curl libbz2-dev pkg-config make - -# Create and activate virtualenv -python3.8 -m venv /opt/venv -source /opt/venv/bin/activate - -# Install library for interpreter -pip install nvidia-pytriton -``` - -### Creating virtualenv using `miniconda` - -In order to install different version replace the `3.8` with desired Python version in the example below: - -```shell -# Install necessary packages -apt update -y -apt install -y python3 python3-distutils python-is-python3 curl - -# Download miniconda -CONDA_VERSION=latest -TARGET_MACHINE=x86_64 -curl "https://repo.anaconda.com/miniconda/Miniconda3-${CONDA_VERSION}-Linux-${TARGET_MACHINE}.sh" --output miniconda.sh - -# Install miniconda and add to 
PATH -bash miniconda.sh -export PATH=~/miniconda3/bin/:$PATH - -# Initialize bash -conda init bash -bash - -# Create and activate virtualenv -conda create -c conda-forge -n venv python=3.8 -conda activate venv - -# Export the library path -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CONDA_PREFIX/lib - -# Install library for interpreter -pip install nvidia-pytriton -``` - -## Building binaries from source - -The binary package can be built from the source, allowing access to unreleased hotfixes, the ability to modify the PyTriton code, and compatibility with various Triton Inference Server versions, including custom server builds. -For further information on building the PyTriton binary, refer to the [Building](building.md) page. diff --git a/stf/stf-api-alternative/pytriton/docs/known_issues.md b/stf/stf-api-alternative/pytriton/docs/known_issues.md deleted file mode 100644 index 92053fc1a8eff182b6af8db75021a6c3c4319af4..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/known_issues.md +++ /dev/null @@ -1,25 +0,0 @@ - - -# Known Issues and Limitations - -- There is no one-to-one match between our solution and [Triton Inference Server](https://github.com/triton-inference-server/server) features, especially in terms of supporting a user model store. -- Support is currently limited to the x86-64 instruction set architecture. -- Running multiple scripts hosting PyTriton on the same machine or container is not feasible. -- Deadlocks may occur in some models when employing the NCCL communication library and multiple Inference Callables are triggered concurrently. This issue can be observed when deploying multiple instances of the same model or multiple models within a single server script. Additional information about this issue can be found [here](https://docs.nvidia.com/deeplearning/nccl/user-guide/docs/usage/communicators.html#using-multiple-nccl-communicators-concurrently). -- Enabling verbose logging may cause a significant performance drop in model inference. -- GRPC ModelClient doesn't support timeouts for model configuration and model metadata requests due to a limitation in the underlying tritonclient library. -- HTTP ModelClient may not respect the specified timeouts for model initialization and inference requests, especially when they are smaller than 1 second, resulting in longer waiting times. This issue is related to the underlying implementation of HTTP protocol. \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/docs/overrides/partials/copyright.html b/stf/stf-api-alternative/pytriton/docs/overrides/partials/copyright.html deleted file mode 100644 index e42b0307aba11a156db5d6dfdd57fec2aee3a8d2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/overrides/partials/copyright.html +++ /dev/null @@ -1,16 +0,0 @@ - -Copyright © 2022 NVIDIA Corporation \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/docs/pypi.rst b/stf/stf-api-alternative/pytriton/docs/pypi.rst deleted file mode 100644 index 280458396f26c19a298e72dee5ecb0dc60ef4f26..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/pypi.rst +++ /dev/null @@ -1,135 +0,0 @@ -.. - Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -PyTriton -========== - -PyTriton is a Flask/FastAPI-like interface that simplifies Triton's deployment in Python environments. -The library allows serving Machine Learning models directly from Python through -NVIDIA's `Triton Inference Server`_. - -.. _Triton Inference Server: https://github.com/triton-inference-server - -In PyTriton, as in Flask or FastAPI, you can define any Python function that executes a machine learning model prediction and exposes -it through an HTTP/gRPC API. PyTriton installs Triton Inference Server in your environment and uses it for handling -HTTP/gRPC requests and responses. Our library provides a Python API that allows attaching a Python function to Triton -and a communication layer to send/receive data between Triton and the function. This solution helps utilize the -performance features of Triton Inference Server, such as dynamic batching or response cache, without changing your model -environment. Thus, it improves the performance of running inference on GPU for models implemented in Python. The solution is -framework-agnostic and can be used along with frameworks like PyTorch, TensorFlow, or JAX. - - -Installation --------------- - -The package can be installed from `pypi`_ using: - -.. _pypi: https://pypi.org/project/nvidia-pytriton/ - -.. code-block:: text - - pip install -U nvidia-pytriton - -More details about installation can be found in the `documentation`_. - -.. _documentation: https://triton-inference-server.github.io/pytriton/latest/installation/ - -Example ---------- - -The example presents how to run Python model in Triton Inference Server without need to change the current working -environment. In the example we are using a simple `Linear` PyTorch model. - -The requirement for the example is to have installed PyTorch in your environment. You can do it running: - - -.. code-block:: text - - pip install torch - -In the next step define the `Linear` model: - -.. code-block:: python - - import torch - - model = torch.nn.Linear(2, 3).to("cuda").eval() - -Create a function for handling inference request: - -.. code-block:: python - - import numpy as np - from pytriton.decorators import batch - - - @batch - def infer_fn(**inputs: np.ndarray): - (input1_batch,) = inputs.values() - input1_batch_tensor = torch.from_numpy(input1_batch).to("cuda") - output1_batch_tensor = model(input1_batch_tensor) # Calling the Python model inference - output1_batch = output1_batch_tensor.cpu().detach().numpy() - return [output1_batch] - - -In the next step, create the connection between the model and Triton Inference Server using the bind method: - -.. 
code-block:: python - - from pytriton.model_config import ModelConfig, Tensor - from pytriton.triton import Triton - - # Connecting inference callback with Triton Inference Server - with Triton() as triton: - # Load model into Triton Inference Server - triton.bind( - model_name="Linear", - infer_func=infer_fn, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128) - ) - -Finally, serve the model with Triton Inference Server: - -.. code-block:: python - - from pytriton.triton import Triton - - with Triton() as triton: - ... # Load models here - triton.serve() - -The `bind` method is creating a connection between Triton Inference Server and the `infer_fn` which handle -the inference queries. The `inputs` and `outputs` describe the model inputs and outputs that are exposed in -Triton. The config field allows more parameters for model deployment. - -The `serve` method is blocking and at this point the application will wait for incoming HTTP/gRPC requests. From that -moment the model is available under name `Linear` in Triton server. The inference queries can be sent to -`localhost:8000/v2/models/Linear/infer` which are passed to the `infer_fn` function. - -Links -------- - -* Documentation: https://triton-inference-server.github.io/pytriton -* Source: https://github.com/triton-inference-server/pytriton -* Issues: https://github.com/triton-inference-server/pytriton/issues -* Changelog: https://github.com/triton-inference-server/pytriton/blob/main/CHANGELOG.md -* Known Issues: https://github.com/triton-inference-server/pytriton/blob/main/docs/known_issues.md -* Contributing: https://github.com/triton-inference-server/pytriton/blob/main/CONTRIBUTING.md diff --git a/stf/stf-api-alternative/pytriton/docs/quick_start.md b/stf/stf-api-alternative/pytriton/docs/quick_start.md deleted file mode 100644 index 45e3c8c0e26432fbd8428977fdcc4b8d093b4722..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/quick_start.md +++ /dev/null @@ -1,180 +0,0 @@ - - -# Quick Start - -The prerequisite for this page is to install PyTriton, which can be found -in the [installation](installation.md) -page. - -The Quick Start presents how to run a Python model in the Triton Inference Server without needing to change the current working -environment. In this example, we are using a simple `Linear` PyTorch model. - -The integration of the model requires providing the following elements: - -- The model - a framework or Python model or function that handles inference requests -- Inference Callable - function or class with `__call__` method, that handles the input data coming from Triton and returns the result -- Python function connection with Triton Inference Server - a binding for communication between Triton and the Inference Callable - -The requirement for the example is to have PyTorch installed in your environment. You can do this by running: - - - -```shell -pip install torch -``` - -In the next step, define the `Linear` model: - -```python -import torch - -model = torch.nn.Linear(2, 3).to("cuda").eval() -``` - -In the second step, create an inference callable as a function. The function obtains the HTTP/gRPC request data as an argument, which should be in the form of a NumPy array. The expected return object should also be a NumPy array. You can define an inference callable as a function that uses the `@batch` decorator from PyTriton. 
This decorator converts the input request into a more suitable format that can be directly passed to the model. You can read more about [decorators here](decorators.md). - -Example implementation: - - - -```python -import numpy as np -import torch - -from pytriton.decorators import batch - - -@batch -def infer_fn(**inputs: np.ndarray): - (input1_batch,) = inputs.values() - input1_batch_tensor = torch.from_numpy(input1_batch).to("cuda") - output1_batch_tensor = model(input1_batch_tensor) # Calling the Python model inference - output1_batch = output1_batch_tensor.cpu().detach().numpy() - return [output1_batch] -``` - -In the next step, you can create the binding between the inference callable and Triton Inference Server using the `bind` method from PyTriton. This method takes the model name, the inference callable, the inputs and outputs tensors, and an optional model configuration object. - - -```python -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -# Connecting inference callable with Triton Inference Server -with Triton() as triton: - triton.bind( - model_name="Linear", - infer_func=infer_fn, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128) - ) - ... -``` - -Finally, serve the model with the Triton Inference Server: - - - -```python -from pytriton.triton import Triton - -with Triton() as triton: - ... # Load models here - triton.serve() -``` - -The `bind` method creates a connection between the Triton Inference Server and the `infer_fn`, which handles -the inference queries. The `inputs` and `outputs` describe the model inputs and outputs that are exposed in -Triton. The config field allows more parameters for model deployment. - -The `serve` method is blocking, and at this point, the application waits for incoming HTTP/gRPC requests. From that -moment, the model is available under the name `Linear` in the Triton server. The inference queries can be sent to -`localhost:8000/v2/models/Linear/infer`, which are passed to the `infer_fn` function. - -If you would like to use Triton in the background mode, use `run`. More about that can be found -in the [Deploying Models](initialization.md) page. - -Once the `serve` or `run` method is called on the `Triton` object, the server status can be obtained using: - - - -```shell -curl -v localhost:8000/v2/health/live -``` - -The model is loaded right after the server starts, and its status can be queried using: - - - -```shell -curl -v localhost:8000/v2/models/Linear/ready -``` - -Finally, you can send an inference query to the model: - - -```shell -curl -X POST \ - -H "Content-Type: application/json" \ - -d @input.json \ - localhost:8000/v2/models/Linear/infer -``` - -The `input.json` with sample query: - -```json -{ - "id": "0", - "inputs": [ - { - "name": "INPUT_1", - "shape": [1, 2], - "datatype": "FP32", - "parameters": {}, - "data": [[-0.04281254857778549, 0.6738349795341492]] - } - ] -} -``` - -Read more about the HTTP/gRPC interface in the Triton Inference Server -[documentation](https://github.com/triton-inference-server/server/blob/main/docs/customization_guide/inference_protocols.md#httprest-and-grpc-protocols). 
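The same request can be sent from Python with a plain HTTP client. The sketch below assumes the `Linear` model from this guide is already serving on `localhost:8000` and reuses the auto-generated input name `INPUT_1` from the sample `input.json`; the `requests` package is an extra dependency and not part of PyTriton.

```python
import requests

# Same payload as the curl example above, trimmed to the required fields.
payload = {
    "id": "0",
    "inputs": [
        {
            "name": "INPUT_1",
            "shape": [1, 2],
            "datatype": "FP32",
            "data": [[-0.0428, 0.6738]],
        }
    ],
}

response = requests.post("http://localhost:8000/v2/models/Linear/infer", json=payload)
response.raise_for_status()

# The KServe v2 response contains one entry per declared output tensor.
for output in response.json()["outputs"]:
    print(output["name"], output["shape"], output["data"])
```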
- -You can also validate the deployed model using a simple client that can perform inference requests: - - - -```python -import torch -from pytriton.client import ModelClient - -input1_data = torch.randn(128, 2).cpu().detach().numpy() - -with ModelClient("localhost:8000", "Linear") as client: - result_dict = client.infer_batch(input1_data) - -print(result_dict) -``` - -The full example code can be found in [examples/linear_random_pytorch](../examples/linear_random_pytorch). - -More information about running the server and models can be found in [Deploying Models](initialization.md) page. diff --git a/stf/stf-api-alternative/pytriton/docs/remote_triton.md b/stf/stf-api-alternative/pytriton/docs/remote_triton.md deleted file mode 100644 index cb3013745da89d3ff1b0446a5cc059fc7c9b0306..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/docs/remote_triton.md +++ /dev/null @@ -1,62 +0,0 @@ - -# PyTriton remote mode - -Remote mode is a way to use the PyTriton with the Triton Inference Server running remotely (at this moment -it must be deployed on the same machine, but may be launched in a different container). - -To bind the model in remote mode, it is required to use the `RemoteTriton` class instead of `Triton`. -Only difference of using `RemoteTriton` is that it requires the triton `url` argument in the constructor. - -## Example of binding a model in remote mode - -Example below assumes that the Triton Inference Server is running on the same machine (launched with PyTriton -in separate python script). - -`RemoteTriton` binds remote model to existing Triton Inference Server. -When `RemoteTriton` is closed, the model is unloaded from the server. - - -```python -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import RemoteTriton, TritonConfig - -triton_config = TritonConfig( - cache_config=[f"local,size={1024 * 1024}"], # 1MB -) - -@batch -def _add_sub(**inputs): - a_batch, b_batch = inputs.values() - add_batch = a_batch + b_batch - sub_batch = a_batch - b_batch - return {"add": add_batch, "sub": sub_batch} - -with RemoteTriton(url='localhost') as triton: - triton.bind( - model_name="AddSub", - infer_func=_add_sub, - inputs=[Tensor(shape=(1,), dtype=np.float32), Tensor(shape=(1,), dtype=np.float32)], - outputs=[Tensor(shape=(1,), dtype=np.float32), Tensor(shape=(1,), dtype=np.float32)], - config=ModelConfig(max_batch_size=8, response_cache=True) - ) - triton.serve() -``` - - diff --git a/stf/stf-api-alternative/pytriton/examples/README.md b/stf/stf-api-alternative/pytriton/examples/README.md deleted file mode 100644 index 6593d6aa2db3b4cfdd554df42888bc74db426fca..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/README.md +++ /dev/null @@ -1,59 +0,0 @@ - - -# Examples - -We provide simple examples on how to integrate PyTorch, TensorFlow2, JAX, and simple Python models with the Triton Inference -Server using PyTriton. The examples are available -in the [GitHub repository](../examples). 
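Most of the examples listed below follow the same server/client split shown earlier: a `server.py` binds an inference callable, and a `client.py` sends batched NumPy inputs. As a quick sanity check for any of them, a generic client along these lines can be used; this is a minimal sketch, and the model name, address, and input shapes are assumptions that must be adjusted to the chosen example.

```python
import numpy as np

from pytriton.client import ModelClient

MODEL_NAME = "AddSub"  # hypothetical - replace with the bound model name
batch = np.ones((2, 1), dtype=np.float32)

with ModelClient("localhost:8000", MODEL_NAME) as client:
    # Positional inputs follow the order used in triton.bind(...).
    outputs = client.infer_batch(batch, batch)

for name, value in outputs.items():
    print(f"{name}: {value.tolist()}")
```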
- -## Samples Models Deployment - -The list of example models deployments: - -- [Add-Sub Python model](../examples/add_sub_python) -- [Add-Sub Python model Jupyter Notebook](../examples/add_sub_notebook) -- [BART PyTorch from HuggingFace](../examples/huggingface_bart_pytorch) -- [BERT JAX from HuggingFace](../examples/huggingface_bert_jax) -- [Identity Python model](../examples/identity_python) -- [Linear RAPIDS/CuPy model](../examples/linear_cupy) -- [Linear RAPIDS/CuPy model Jupyter Notebook](../examples/linear_cupy_notebook) -- [Linear PyTorch model](../examples/identity_python) -- [Multi-Layer TensorFlow2](../examples/mlp_random_tensorflow2) -- [Multi Instance deployment for ResNet50 PyTorch model](../examples/multi_instance_resnet50_pytorch) -- [Multi Model deployment for Python models](../examples/multiple_models_python) -- [NeMo Megatron GPT model with multi-node support](../examples/nemo_megatron_gpt_multinode) -- [OPT JAX from HuggingFace with multi-node support](../examples/huggingface_opt_multinode_jax) -- [ResNet50 PyTorch from HuggingFace](../examples/huggingface_resnet_pytorch) -- [Stable Diffusion 1.5 from HuggingFace](../examples/huggingface_stable_diffusion) -- [Using custom HTTP/gRPC headers and parameters](../examples/use_parameters_and_headers) - -## Profiling models - -The [Perf Analyzer](https://github.com/triton-inference-server/client/blob/main/src/c++/perf_analyzer/README.md) can be -used to profile the models served through PyTriton. We have prepared an example of -using Perf Analyzer to profile BART PyTorch. See the example code in -the [GitHub repository](../examples/perf_analyzer). - -## Kubernetes Deployment - -The following examples contain a guide on how to deploy them on a Kubernetes cluster: - -- [BART PyTorch from HuggingFace](../examples/huggingface_bart_pytorch) -- [OPT JAX from HuggingFace with multi-node support](../examples/huggingface_opt_multinode_jax) -- [NeMo Megatron GPT model with multi-node support](../examples/nemo_megatron_gpt_multinode) -- [ResNet50 PyTorch from HuggingFace](../examples/huggingface_resnet_pytorch) -- [Stable Diffusion 1.5 from HuggingFace](../examples/huggingface_stable_diffusion) \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/README.md b/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/README.md deleted file mode 100644 index 6cbc3cbf50847a15316aecac1d55787cb34d1ec4..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/README.md +++ /dev/null @@ -1,37 +0,0 @@ - - -# Add-Sub Python Model in Jupyter Notebook - -## Overview - -The example presents a simple Add-Sub model which perform an addition and subtraction operations -on passed input data and it also shows how to redefine inference callable and reload the model. - -Example consists of following scripts: - -- `add_sub.ipynb` - Jupyter Notebook file - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. 
In current terminal run jupyter notebook and open add_sub.ipynb file - -```shell -jupyter notebook -``` diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/__init__.py b/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/add_sub.ipynb b/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/add_sub.ipynb deleted file mode 100644 index 37233806bb1f4bc1ff7fb9950a8a943e8b0a355a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_notebook/add_sub.ipynb +++ /dev/null @@ -1,320 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Example of using Triton Server Wrapper in Jupyter notebook" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Triton server setup with custom model" - ] - }, - { - "cell_type": "markdown", - "source": [ - "Install dependencies" - ], - "metadata": { - "collapsed": false - } - }, - { - "cell_type": "code", - "execution_count": null, - "outputs": [], - "source": [ - "import sys\n", - "!{sys.executable} -m pip install numpy\n", - "!{sys.executable} -m pip install cupy-cuda12x --extra-index-url=https://pypi.ngc.nvidia.com" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n" - } - } - }, - { - "cell_type": "markdown", - "source": [ - "Required imports:" - ], - "metadata": { - "collapsed": false - } - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "pycharm": { - "name": "#%%\n" - } - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "from pytriton.decorators import batch\n", - "from pytriton.model_config import ModelConfig, Tensor\n", - "from pytriton.triton import Triton" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Define inference callable:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@batch\n", - "def _add_sub(**inputs):\n", - " a_batch, b_batch = inputs.values()\n", - " add_batch = a_batch + b_batch\n", - " sub_batch = a_batch - b_batch\n", - " return {\"add\": add_batch, \"sub\": sub_batch}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Instantiate titon wrapper class and load model with defined callable:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton = Triton()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton.bind(\n", - " model_name=\"AddSub\",\n", - " 
infer_func=_add_sub,\n", - " inputs=[\n", - " Tensor(dtype=np.float32, shape=(-1,)),\n", - " Tensor(dtype=np.float32, shape=(-1,)),\n", - " ],\n", - " outputs=[\n", - " Tensor(name=\"add\", dtype=np.float32, shape=(-1,)),\n", - " Tensor(name=\"sub\", dtype=np.float32, shape=(-1,)),\n", - " ],\n", - " config=ModelConfig(max_batch_size=128),\n", - " )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Run triton server with defined model inference callable" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton.run()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Example inference performed with ModelClient calling triton server" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from pytriton.client import ModelClient\n", - "batch_size = 2\n", - "a_batch = np.ones((batch_size, 1), dtype=np.float32)\n", - "b_batch = np.ones((batch_size, 1), dtype=np.float32)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "with ModelClient(\"localhost\", \"AddSub\") as client:\n", - " result_batch = client.infer_batch(a_batch, b_batch)\n", - "\n", - "for output_name, data_batch in result_batch.items():\n", - " print(f\"{output_name}: {data_batch.tolist()}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Re-setup triton server with modified inference callable" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Stop triton server" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton.stop()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Redefine inference callable" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "@batch\n", - "def _add_sub(**inputs):\n", - " a_batch, b_batch = inputs.values()\n", - " add_batch = (a_batch + b_batch) * 2\n", - " sub_batch = (a_batch - b_batch) * 3\n", - " return {\"add\": add_batch, \"sub\": sub_batch}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Load model again" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton.bind(\n", - " model_name=\"AddSub\",\n", - " infer_func=_add_sub,\n", - " inputs=[\n", - " Tensor(dtype=np.float32, shape=(-1,)),\n", - " Tensor(dtype=np.float32, shape=(-1,)),\n", - " ],\n", - " outputs=[\n", - " Tensor(name=\"add\", dtype=np.float32, shape=(-1,)),\n", - " Tensor(name=\"sub\", dtype=np.float32, shape=(-1,)),\n", - " ],\n", - " config=ModelConfig(max_batch_size=128),\n", - " )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Run triton server with new model inference callable" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton.run()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## The same inference performed with modified inference callable" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "with ModelClient(\"localhost\", \"AddSub\") as client:\n", - " result_batch = client.infer_batch(a_batch, b_batch)\n", - "\n", - "for output_name, data_batch in result_batch.items():\n", - " 
print(f\"{output_name}: {data_batch.tolist()}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Stop server at the end" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton.stop()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python/README.md b/stf/stf-api-alternative/pytriton/examples/add_sub_python/README.md deleted file mode 100644 index ac7120b0b07276c097945212a9e001748f13353b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python/README.md +++ /dev/null @@ -1,53 +0,0 @@ - - -# Add-Sub Python Model - -## Overview - -The example presents a simple Add-Sub model which perform an addition and subtraction operations -on passed input data. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python/__init__.py b/stf/stf-api-alternative/pytriton/examples/add_sub_python/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python/client.py b/stf/stf-api-alternative/pytriton/examples/add_sub_python/client.py deleted file mode 100644 index 53536afd9ce571db1225842956bb6a6afc3b7ecc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python/client.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for add_sub_python sample server.""" -import logging - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.add_sub_python.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -batch_size = 2 -a_batch = np.ones((batch_size, 1), dtype=np.float32) -b_batch = np.ones((batch_size, 1), dtype=np.float32) - -logger.info(f"a: {a_batch.tolist()}") -logger.info(f"b: {b_batch.tolist()}") - -with ModelClient("localhost", "AddSub") as client: - logger.info("Sending inference request") - result_batch = client.infer_batch(a_batch, b_batch) - -for output_name, data_batch in result_batch.items(): - logger.info(f"{output_name}: {data_batch.tolist()}") diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python/install.sh b/stf/stf-api-alternative/pytriton/examples/add_sub_python/install.sh deleted file mode 100644 index 73854bc2993639c6f6ee7387dcb8880c8cf10e84..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -pip install numpy \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python/server.py b/stf/stf-api-alternative/pytriton/examples/add_sub_python/server.py deleted file mode 100644 index af801368f35237f91764dbb6026482fd91d80338..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python/server.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Server with simple python model performing adding and subtract operation.""" -import logging - -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.add_sub_python.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -@batch -def _add_sub(**inputs): - a_batch, b_batch = inputs.values() - add_batch = a_batch + b_batch - sub_batch = a_batch - b_batch - return {"add": add_batch, "sub": sub_batch} - - -with Triton() as triton: - logger.info("Loading AddSub model") - triton.bind( - model_name="AddSub", - infer_func=_add_sub, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="add", dtype=np.float32, shape=(-1,)), - Tensor(name="sub", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - strict=True, - ) - logger.info("Serving model") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/README.md b/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/README.md deleted file mode 100644 index 0edcc252e4da360414deace06a99badfcd1bc840..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/README.md +++ /dev/null @@ -1,53 +0,0 @@ - - -# Add-Sub Python Model with optional input - -## Overview - -The example presents a simple Add-Sub model which perform an addition and subtraction operations -on passed input data and uses additional optional input. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/__init__.py b/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/client.py b/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/client.py deleted file mode 100644 index c2ebcf828b7e11321e948e183c3d6636339f071e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/client.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for add_sub_python sample server.""" -import logging - -import numpy as np - -from pytriton.client import ModelClient - -LOGGER = logging.getLogger("examples.add_sub_python_with_optional.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -batch_size = 2 -a_batch = np.ones((batch_size, 1), dtype=np.float32) -b_batch = np.ones((batch_size, 1), dtype=np.float32) - - -def main(): - LOGGER.info(f"a: {a_batch.tolist()}") - LOGGER.info(f"b: {b_batch.tolist()}") - with ModelClient("localhost", "AddSub") as client: - LOGGER.info("Sending inference request") - result_batch1 = client.infer_batch(a_batch, b_batch) - result_batch2 = client.infer_batch(a=a_batch, b=b_batch, w=np.ones((batch_size, 1), dtype=np.float32) * 3) - result_batch3 = client.infer_batch(a_batch, b_batch, np.ones((batch_size, 1), dtype=np.float32) * 5) - result_batch4 = client.infer_batch( - a_batch, - b_batch, - np.ones((batch_size, 1), dtype=np.float32) * 5, - np.ones((batch_size, 1), dtype=np.float32) * 3, - ) - res = [result_batch1, result_batch2, result_batch3, result_batch4] - LOGGER.info("Received inference responses") - for result_batch in res: - for output_name, data_batch in result_batch.items(): - LOGGER.info(f"{output_name}: {data_batch.tolist()}") - LOGGER.info("------------------------") - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/install.sh b/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/install.sh deleted file mode 100644 index 283866c5afffc05f8f43df81552e9bdc7c972870..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-set -xe - -pip install numpy \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/server.py b/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/server.py deleted file mode 100644 index 7381224adb0e3942d812cb07fa3834b1acf55a21..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/add_sub_python_with_optional/server.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Server with simple python model performing adding and subtract operation with optional 'w' and 't' params.""" -import logging - -import numpy as np - -from pytriton.decorators import batch, fill_optionals, first_value -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -LOGGER = logging.getLogger("examples.add_sub_python_with_optional.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -@fill_optionals(t=np.array([2.0], dtype=np.float32)) -@batch -@first_value("w") -def _add_sub(a, b, t, **inputs): - w = 1 if "w" not in inputs else inputs["w"] - add_batch = a * w + b + t - sub_batch = a * w - b + t - return {"add": add_batch, "sub": sub_batch} - - -def main(): - with Triton() as triton: - LOGGER.info("Loading AddSub model") - triton.bind( - model_name="AddSub", - infer_func=_add_sub, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,), name="a"), - Tensor(dtype=np.float32, shape=(-1,), name="b"), - Tensor(dtype=np.float32, shape=(-1,), name="t", optional=True), - Tensor(dtype=np.float32, shape=(-1,), name="w", optional=True), - ], - outputs=[ - Tensor(name="add", dtype=np.float32, shape=(-1,)), - Tensor(name="sub", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - strict=True, - ) - LOGGER.info("Serving model") - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/LICENSE b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/LICENSE deleted file mode 100644 index ff5bd9ad14ea9e24a4ab81b36d0080c7008113f6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/LICENSE +++ /dev/null @@ -1,6 +0,0 @@ -Test video - excerpt from Sintel https://durian.blender.org - -License (https://durian.blender.org/sharing/): -CC BY 3.0 - -© copyright Blender Foundation | www.sintel.org \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/README.md b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/README.md deleted file mode 100644 index da01f47e848aa0d3f7d84521995c213b732b5b9e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/README.md +++ /dev/null @@ -1,126 +0,0 @@ - - -# ResNet101 PyTorch segmentation 
example - -## Overview - -The example presents an inference scenario using DALI and ResNet101. - -DALI is a portable, holistic framework for GPU-accelerated data loading and augmentation in deep learning workflows. -DALI supports processing images, videos, and audio data. The key features are: straightforward integration with Triton -Inference Server (using [DALI Backend](https://github.com/triton-inference-server/dali_backend)) and PyTriton, -framework-agnostic processing implementation, batched processing, wide collection of operations and graph-based pipeline -implementation approach. - -ResNet101 is a segmentation model. Together with DALI on board they form the following scenario: - -1. **Preprocessing** - DALI-based typical ResNet preprocessing. Instead of images the input data is a video. - Includes GPU decoding (using NVDEC), resize and normalization. -2. **Inference** - the model returns the probabilities of a given class in every pixel. -3. **Postprocessing** - DALI takes the original image and the probabilities and extracts a particular class. - -Every step mentioned above is executed by the Triton server. Triton client is used only for reading the test -data from disk and handling the result. - -The example consists of following files: - -- `server.py` - start the model with Triton Inference Server, -- `client.py` - execute HTTP/gRPC requests to the deployed model, -- `model_inference.py` - ResNet101 inference with PyTorch, - -## The sharp bits - -Presented scenario is not a straightforward and simple example. There are some sharp bits and we'll try to explain all -of them in this section. - -1. **`prefetch_queue_depth` option**. One of main DALI features is the prefetching - loading next iteration when the - previous one is being processed by the DL model. By default, when the model processes the batch, DALI is preparing - the next iteration, which is expressed by the default value of the `prefetch_queue_depth = 2` argument. While this is - really useful for training, it's not so much for inference - we tend to get the data and quickly process it as soon - as possible. Therefore in most of inference DALI pipeline, the `prefetch_queue_depth = 1`. -1. **`NFCHW -> NCHW` conversion**. When programming DALI pipeline, user does not see the batch dimension - it is hidden. - DALI assumes, that since every operation will be defined the same way for every sample in a batch, the batch can be - implicit. Since the input data to the preprocessing pipeline is a video, the preprocessing pipeline returns the data - in a `NFCHW` layout, where `N` denotes the batch dimension and `F` denotes the frame in a video sequence. Right after - the preprocessing pipeline, the `NFCHW` layout has to be flattened to `(N*F)CHW` layout to form a batch. -1. **Memory limit**. When using DALI with Triton or PyTriton, there are two ways of decoding the videos: - using `fn.decoders.video` or using `fn.inputs.video`. The former receives the encoded buffer via `fn.external_source` - operator and decodes the whole video in one go. On the other hand, the latter is a standalone input to DALI - pipeline (thus receives the data itself) and decodes only portions of the encoded video, specified - by `sequence_lenght` operator. This behaviour is required for longer videos, as when they are decoded they can take - terabytes of RAM. 
Using `fn.inputs.video` with Triton or PyTriton requires setting DALI model as - a [`decoupled model`](https://github.com/triton-inference-server/server/blob/main/docs/user_guide/decoupled_models.md), - so that it can generate multiple responses per one request. Since PyTriton does not support decoupled model - yet, `fn.decoders.video` is used in this example. For more details about the video decoding in DALI please refer to - the operators documentation: [`fn.decoders.video`](https://docs.nvidia.com/deeplearning/dali/user-guide/docs/operations/nvidia.dali.fn.experimental.decoders.video.html#nvidia.dali.fn.experimental.decoders.video) - and [`fn.inputs.video`](https://docs.nvidia.com/deeplearning/dali/user-guide/docs/operations/nvidia.dali.fn.experimental.inputs.video.html). - -## Running example - -### Prerequisities - -This example assumes the following dependencies installed in your system: - -1. Docker -2. NumPy -3. OpenCV-Python (optionally, for saving the images to disk) -4. PyTriton (for the `client.py` script) - -### Run - -To run this example, please follow these steps: - -1. Install required dependencies. - -2. Run the NVIDIA PyTorch container: - -```shell -$ docker run -it --gpus all --shm-size 8gb -v $(pwd):/dali -w /dali --net host nvcr.io/nvidia/pytorch:23.10-py3 bash -``` - -3. Install PyTriton following the [installation instruction](../../README.md#installation) - -4. Inside the container start the Triton server: - -```shell -$ python server.py -``` - -5. In a new terminal window run the Triton client: - -```shell -$ python client.py -``` - -### Extra options - -The `client.py` script accepts extra options, listed below: - -1. `--dump-images` - If specified, the original and segmented images will be saved to disk (in a `$(cwd)/test_video` - directory). -2. `--image-paths` - If specified, these paths will be used as the input data for the processing, - instead of the default sample. - -## The result - -Original image: - -![](test_video/orig0.jpg) - -Segmented image: - -![](test_video/segm0.jpg) \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/__init__.py deleted file mode 100644 index dbfe137c14d6287870b58c675131cc1d4284c683..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
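To make the `NFCHW -> NCHW` point from the sharp bits above concrete, the reshape below sketches the flattening performed right after the DALI preprocessing pipeline; the tensor sizes are invented for illustration, and the real code in this example's `server.py` applies the same reshape to GPU tensors.

```python
import torch

n, f, c, h, w = 2, 8, 3, 224, 224          # hypothetical: 2 videos, 8 frames each
preprocessed = torch.zeros(n, f, c, h, w)  # NFCHW output of the preprocessing pipeline

# Flatten the batch and frame dimensions so the segmentation model
# sees a plain NCHW batch of n * f frames.
flattened = preprocessed.reshape(-1, c, h, w)
assert flattened.shape == (n * f, c, h, w)
```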
diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/client.py b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/client.py deleted file mode 100644 index 429ad7a6af64adfa0c48341755cfc08639de22d5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/client.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import logging -import pathlib - -import numpy as np # pytype: disable=import-error - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.dali_resnet101_pytorch.client") - -VIDEO_PATH = "examples/dali_resnet101_pytorch/test_video/sintel_trailer_short.mp4" - - -def load_video(video_path): - return np.array(np.fromfile(video_path, dtype=np.uint8)).reshape(1, -1) - - -def infer_model(input, args): - with ModelClient(args.url, "ResNet101", init_timeout_s=args.init_timeout_s) as client: - result_data = client.infer_batch(input) - - original_batch = result_data["original"] - segmented_batch = result_data["segmented"] - - if args.dump_images: - pathlib.Path("test_video").mkdir(parents=True, exist_ok=True) - for batch_idx, (original, segmented) in enumerate(zip(original_batch, segmented_batch)): - for frame_idx, (orig, segm) in enumerate(zip(original, segmented)): - import cv2 # pytype: disable=import-error - - cv2.imwrite(f"test_video/orig_{batch_idx:03d}_{frame_idx:04d}.jpg", orig) - cv2.imwrite(f"test_video/segm_{batch_idx:03d}_{frame_idx:04d}.jpg", segm) - - logger.info("Processing finished.") - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--url", - default="localhost", - help=( - "Url to Triton server (ex. grpc://localhost:8001)." - "HTTP protocol with default port is used if parameter is not provided" - ), - required=False, - ) - parser.add_argument( - "--init-timeout-s", - type=float, - default=600.0, - help="Server and model ready state timeout in seconds.", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - parser.add_argument( - "--dump-images", - action="store_true", - default=False, - help="If True, the client will save processed images to disk. 
Requires cv2 module.", - required=False, - ) - parser.add_argument( - "--video-path", - default=None, - help="Paths of the video to process.", - required=False, - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - infer_model(load_video(VIDEO_PATH if args.video_path is None else args.video_path), args) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/model_inference.py b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/model_inference.py deleted file mode 100644 index 3304d268d7ddad6c05f04ff2aa19bd58deff75c7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/model_inference.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# ---------------------------------------------------------------------------------------------------- # -# This file is an excerpt from CV-CUDA segmentation example: # -# https://github.com/CVCUDA/CV-CUDA/blob/release_v0.3.x/samples/segmentation/python/model_inference.py # -# ---------------------------------------------------------------------------------------------------- # - -import logging - -import nvtx # pytype: disable=import-error -import torch # pytype: disable=import-error -from torchvision.models import segmentation as segmentation_models # pytype: disable=import-error - - -class SegmentationPyTorch: - def __init__(self, seg_class_name, device_id): - self.logger = logging.getLogger(__name__) - self.device_id = device_id - # Fetch the segmentation index to class name information from the weights - # meta properties. - # The underlying pytorch model that we use for inference is the FCN model - # from torchvision. - torch_model = segmentation_models.fcn_resnet101 - weights = segmentation_models.FCN_ResNet101_Weights.DEFAULT - - try: - self.class_index = weights.meta["categories"].index(seg_class_name) - except ValueError: - raise ValueError( - "Requested segmentation class '%s' is not supported by the " - "fcn_resnet101 model. All supported class names are: %s" - % (seg_class_name, ", ".join(weights.meta["categories"])) - ) - - # Inference uses PyTorch to run a segmentation model on the pre-processed - # input and outputs the segmentation masks. 
- class FCN_Softmax(torch.nn.Module): # noqa: N801 - def __init__(self, fcn): - super().__init__() - self.fcn = fcn - - def forward(self, x): - infer_output = self.fcn(x)["out"] - return torch.nn.functional.softmax(infer_output, dim=1) - - fcn_base = torch_model(weights=weights) - fcn_base.eval() - self.model = FCN_Softmax(fcn_base).cuda(self.device_id) - self.model.eval() - - self.logger.info("Using PyTorch as the inference engine.") - - def __call__(self, tensor): - nvtx.push_range("inference.torch") - - with torch.no_grad(): - segmented = self.model(tensor) - - nvtx.pop_range() - return segmented diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/server.py b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/server.py deleted file mode 100644 index e72a4fd6647fb2142bab3ee6f6d3fadddc2a3563..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/server.py +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import logging - -import numpy as np # pytype: disable=import-error -import nvidia.dali.fn as fn # pytype: disable=import-error -import nvidia.dali.types as types # pytype: disable=import-error -import torch # pytype: disable=import-error -from model_inference import SegmentationPyTorch # pytype: disable=import-error -from nvidia.dali import pipeline_def # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import DynamicBatcher, ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -MAX_BATCH_SIZE = 32 - -LOGGER = logging.getLogger("examples.dali_resnet101_pytorch.server") - - -@pipeline_def(batch_size=MAX_BATCH_SIZE, num_threads=4, device_id=0, prefetch_queue_depth=1) -def dali_preprocessing_pipe(): - """ - DALI pre-processing pipeline definition. - """ - encoded = fn.external_source(name="encoded") - decoded = fn.experimental.decoders.video(encoded, device="mixed") - preprocessed = fn.resize(decoded, resize_x=224, resize_y=224) - preprocessed = fn.crop_mirror_normalize( - preprocessed, - dtype=types.FLOAT, - output_layout="FCHW", - crop=(224, 224), - mean=[0.485 * 255, 0.456 * 255, 0.406 * 255], - std=[0.229 * 255, 0.224 * 255, 0.225 * 255], - ) - return decoded, preprocessed - - -@pipeline_def(batch_size=MAX_BATCH_SIZE, num_threads=4, device_id=0, prefetch_queue_depth=1) -def dali_postprocessing_pipe(class_idx=0, prob_threshold=0.6): - """ - DALI post-processing pipeline definition - Args: - class_idx: Index of the class that shall be segmented. Shall be correlated with `seg_class_name` argument - in the Model instance. - prob_threshold: Probability threshold, at which the class affiliation is determined. - - Returns: - Segmented images. 
- """ - image = fn.external_source(device="gpu", name="image", layout="HWC") - width = fn.cast(fn.external_source(device="cpu", name="width"), dtype=types.FLOAT) - height = fn.cast(fn.external_source(device="cpu", name="height"), dtype=types.FLOAT) - prob = fn.external_source(device="gpu", name="probabilities", layout="CHW") - prob = fn.expand_dims(prob[class_idx], axes=[2], new_axis_names="C") - prob = fn.resize(prob, resize_x=width, resize_y=height, interp_type=types.DALIInterpType.INTERP_NN) - mask = fn.cast(prob > prob_threshold, dtype=types.UINT8) - return image * mask - - -# Initialize DALI Pipelines. This step is put outside of `infer_func` so it is performed during Triton initialization. -preprocessing_pipe = dali_preprocessing_pipe() -preprocessing_pipe.build() -postprocessing_pipe = dali_postprocessing_pipe() -postprocessing_pipe.build() - - -def preprocess(images): - """ - Setting DALI pipeline inputs and running the pre-processing. - """ - preprocessing_pipe.feed_input("encoded", images) - imgs, preprocessed = preprocessing_pipe.run() - # DALI's TensorListGpu to Torch's Tensor conversion is conducted with the help of the CuPy. - import cupy as cp # pytype: disable=import-error - - return torch.as_tensor(cp.asarray(imgs.as_tensor()), device=torch.device("cuda")), torch.as_tensor( - cp.asarray(preprocessed.as_tensor()), device=torch.device("cuda") - ) - - -def postprocess(images, probabilities): - """ - Setting DALI pipeline inputs and running the post-processing. - """ - postprocessing_pipe.feed_input("image", images, layout="HWC") - postprocessing_pipe.feed_input("probabilities", probabilities, layout="CHW") - postprocessing_pipe.feed_input("width", np.full(images.shape[0], images.shape[2])) - postprocessing_pipe.feed_input("height", np.full(images.shape[0], images.shape[1])) - (img,) = postprocessing_pipe.run() - return img - - -# Initializing ResNet101. This step is put outside of `infer_func` so it is performed during Triton initialization. 
-segmentation = SegmentationPyTorch( - seg_class_name="__background__", - device_id=0, -) - - -@batch -def _infer_fn(**inputs): - encoded_video = inputs["video"] - - image, input = preprocess(encoded_video) - batch_size, frames_num = image.shape[:2] - - input = input.reshape(-1, *input.shape[-3:]) # NFCHW to NCHW (flattening first two dimensions) - image = image.reshape(-1, *image.shape[-3:]) # NFHWC to NHWC (flattening first two dimensions) - - prob = segmentation(input) - out = postprocess(image, prob) - - return { - "original": image.cpu().numpy().reshape(batch_size, frames_num, *image.shape[-3:]), - "segmented": out.as_cpu().as_array().reshape(batch_size, frames_num, *image.shape[-3:]), - } - - -def parse_args(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - return parser.parse_args() - - -def main(): - args = parse_args() - log_verbose = 1 if args.verbose else 0 - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - with Triton(config=TritonConfig(log_verbose=log_verbose)) as triton: - triton.bind( - model_name="ResNet101", - infer_func=_infer_fn, - inputs=[ - Tensor(name="video", dtype=np.uint8, shape=(-1,)), # Encoded video - ], - outputs=[ - Tensor(name="original", dtype=np.uint8, shape=(-1, -1, -1, -1)), # FHWC - Tensor(name="segmented", dtype=np.uint8, shape=(-1, -1, -1, -1)), # FHWC - ], - config=ModelConfig( - max_batch_size=MAX_BATCH_SIZE, - batcher=DynamicBatcher(max_queue_delay_microseconds=5000), - ), - strict=True, - ) - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/orig0.jpg b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/orig0.jpg deleted file mode 100644 index 7e4652c580fd0676f0b5696d6428b8b6646fa7f5..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/orig0.jpg and /dev/null differ diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/segm0.jpg b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/segm0.jpg deleted file mode 100644 index 2f28e4139256f7cb6b1624625238dad864346a9d..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/segm0.jpg and /dev/null differ diff --git a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/sintel_trailer_short.mp4 b/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/sintel_trailer_short.mp4 deleted file mode 100644 index c92d0eb39ca4a176834ad246da889ed448e2116c..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/pytriton/examples/dali_resnet101_pytorch/test_video/sintel_trailer_short.mp4 and /dev/null differ diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/README.md b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/README.md deleted file mode 100644 index 9aa66e9c68786f32ad87479ca20190d835d35376..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/README.md +++ /dev/null @@ -1,137 +0,0 @@ - - -# HuggingFace BART PyTorch Model - -## Overview - -The example presents a HuggingFace BART PyTorch model 
inference. - -The example consists of the following scripts: - -- `install.sh` - install additional dependencies for downloading the model from HuggingFace -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -And configurations: - -- `kubernetes` - example Helm Charts for serving and testing inference in a Kubernetes cluster - -## Running the example locally - -To run the example locally, the `torch` package is required. It can be installed in your current environment using pip: - -```shell -pip install torch -``` - -Alternatively, you can use the NVIDIA PyTorch container: -```shell -docker run -it --gpus 1 --shm-size 8gb -v {repository_path}:{repository_path} -w {repository_path} nvcr.io/nvidia/pytorch:23.10-py3 bash -``` - -If you choose to use the container, we recommend installing the -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -Follow the step-by-step guide to execute the example: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In the current terminal, start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open a new terminal tab (e.g. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run `client.py` to perform queries on the model: - -```shell -./client.py -``` - -## Running the example on a Kubernetes cluster - -The following prerequisites must be met to run the example: - -- Kubernetes cluster with an NVIDIA GPU node -- [NVIDIA Device Plugin](https://github.com/NVIDIA/k8s-device-plugin) installed in the Kubernetes cluster -- Docker Containers Registry accessible from the Kubernetes cluster -- [Installed Helm](https://helm.sh/docs/intro/install/) for creating the deployment and test job - -Optionally, you may install the NVIDIA Container Toolkit and NVIDIA GPU Operator, which enable more features -like [MIG](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-operator-mig.html) or -[Time Slicing](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-sharing.html) support in the cluster. -To learn more about how to set up a Kubernetes cluster with NVIDIA GPUs, you can review the -[NVIDIA Cloud Native Documentation](https://docs.nvidia.com/datacenter/cloud-native/contents.html). - -Below, we present a step-by-step guide assuming that **all the commands are executed from the root of the repository**. - -Follow these steps to run and test the example in the cluster: -1. [Optional] Build the PyTriton wheel following the [build instruction](../../docs/building.md) -2. Prepare the tag under which the Docker image is going to be pushed to your Docker Containers Registry accessible from the Kubernetes -cluster. Example for a local cluster (minikube, k3s) with the registry hosted inside the cluster: -```shell -export DOCKER_IMAGE_NAME_WITH_TAG=localhost:5000/bart-pytorch-example:latest -``` -3. Build and push the Docker image to your registry: - -```shell -# Export the base image used for build -export FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3 -./examples/huggingface_bart_pytorch/kubernetes/build_and_push.sh -``` -**Note**: By default, the container is built using the `pytriton` package from `GitHub`. To build the container with a locally built wheel, -use `export BUILD_FROM=dist` before executing the script. - -4.
Install the Helm Chart with deployment and service: - -```shell -helm upgrade -i --set deployment.image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -bart-pytorch-example \ -./examples/huggingface_bart_pytorch/kubernetes/deployment -``` - -5. Install the Helm Chart with client test - -```shell -helm install --set image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -bart-pytorch-example-test \ -./examples/huggingface_bart_pytorch/kubernetes/test -``` - -Now, you can review the logs from the running PODs to verify the inference is running. To show the logs from cluster -for given POD first list all running pods: -```shell -kubectl get pods -``` - -Next show logs from server or client: -```shell -kubectl logs {NAME} -``` - -To remove the installed charts simply run: -```shell -helm uninstall bart-pytorch-example-test -helm uninstall bart-pytorch-example -``` \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/client.py b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/client.py deleted file mode 100644 index c181a882edcaa39eddee2b93a038bb6b6ba8e1a9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/client.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for BART classifier sample server.""" -import argparse -import logging - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.huggingface_bart_pytorch.client") - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--url", - default="localhost", - help=( - "Url to Triton server (ex. grpc://localhost:8001)." 
- "HTTP protocol with default port is used if parameter is not provided" - ), - required=False, - ) - parser.add_argument( - "--init-timeout-s", - type=float, - default=600.0, - help="Server and model ready state timeout in seconds", - required=False, - ) - parser.add_argument( - "--iterations", - type=int, - default=1, - help="Number of requests per client.", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - sequence = np.array( - [ - ["one day I will see the world"], - ["I would love to learn cook the Asian street food"], - ["Carnival in Rio de Janeiro"], - ["William Shakespeare was a great writer"], - ] - ) - sequence = np.char.encode(sequence, "utf-8") - logger.info(f"Sequence: {sequence}") - - with ModelClient(args.url, "BART", init_timeout_s=args.init_timeout_s) as client: - for req_idx in range(1, args.iterations + 1): - logger.info(f"Sending request ({req_idx}).") - result_dict = client.infer_batch(sequence) - for output_name, output_data in result_dict.items(): - output_data = np.array2string( - output_data, threshold=np.inf, max_line_width=np.inf, separator="," - ).replace("\n", "") - logger.info(f"{output_name}: {output_data} for request ({req_idx}).") - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/install.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/install.sh deleted file mode 100644 index d89ba12b7b2220e8c6f53b3261056b4748b96c27..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -pip install transformers diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/Dockerfile b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/Dockerfile deleted file mode 100644 index 7e6809bdcadb13a7b5f883e0dde3f41d141e87e0..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/Dockerfile +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -ARG FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3 -ARG BUILD_FROM - -FROM ${FROM_IMAGE_NAME} as base -WORKDIR /opt/app - -# Use when build PyTriton from source -FROM base as install-from-dist -COPY dist/*.whl /opt/app -RUN pip install /opt/app/*.whl - -# Install from pypi -FROM base as install-from-pypi -RUN pip install -U nvidia-pytriton - -FROM install-from-${BUILD_FROM} AS image - -ENV PYTHONUNBUFFERED=1 - -WORKDIR /opt/app - -COPY examples/huggingface_bart_pytorch/install.sh /opt/app -RUN /opt/app/install.sh - -COPY examples/huggingface_bart_pytorch/client.py /opt/app -COPY examples/huggingface_bart_pytorch/server.py /opt/app - -ENTRYPOINT [] \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/build_and_push.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/build_and_push.sh deleted file mode 100644 index 49bd03672cc233a85b41956fc1e30bf2fa31d8a7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/build_and_push.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -if [ -z ${DOCKER_IMAGE_NAME_WITH_TAG} ]; then - echo "Provide Docker image name under to push the created image to your registry" - echo "Example:" - echo " export DOCKER_IMAGE_NAME_WITH_TAG=my-registry:5000/bart-pytorch-example:latest" - exit 1 -fi - -if [ -z ${FROM_IMAGE_NAME} ]; then - echo "Provide Docker image that would be used as base image" - echo "Example:" - echo " export FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3" - exit 1 -fi - -BUILD_FROM="${BUILD_FROM:-pypi}" -if [[ ${BUILD_FROM} != "pypi" ]] && [[ ${BUILD_FROM} != "dist" ]]; then - echo "The BUILD_FROM variable should be equal to 'pypi' or 'dist'" - echo "Example:" - echo " export BUILD_FROM=dist" - exit 1 -fi - -set -xe - -DOCKER_BUILDKIT=1 docker build -f examples/huggingface_bart_pytorch/kubernetes/Dockerfile \ - -t ${DOCKER_IMAGE_NAME_WITH_TAG} \ - --build-arg FROM_IMAGE_NAME=${FROM_IMAGE_NAME} \ - --build-arg BUILD_FROM=${BUILD_FROM} . -docker push ${DOCKER_IMAGE_NAME_WITH_TAG} \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/Chart.yaml deleted file mode 100644 index ec97fbce3022bbe01bf7eace1424aa438dce7ad9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: BART PyTriton Example -name: bart-pytorch-example -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/deployment.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/deployment.yaml deleted file mode 100644 index 3810487ffc48a7b6bf19e9ad9ac7a73c9b99fda9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/deployment.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ .Release.Name }}-deployment - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - replicas: {{ .Values.replicaCount }} - selector: - matchLabels: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} - template: - metadata: - labels: - app: {{ template "selector.name" . }} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - restartPolicy: {{ .Values.restartPolicy }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.deployment.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c", "/opt/app/server.py --verbose"] - ports: - - containerPort: 8000 - name: http - - containerPort: 8001 - name: grpc - - containerPort: 8002 - name: metrics - livenessProbe: - initialDelaySeconds: 60 - periodSeconds: 10 - httpGet: - path: /v2/health/live - port: http - readinessProbe: - initialDelaySeconds: 60 - periodSeconds: 10 - httpGet: - path: /v2/health/ready - port: http - resources: - requests: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - limits: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - volumeMounts: - - name: shared-memory - mountPath: /dev/shm - volumes: - - name: shared-memory - emptyDir: - medium: Memory diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/service.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/service.yaml deleted file mode 100644 index 79d7c7f62cd8d714edbd6aec8721d7691007d7c8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/templates/service.yaml +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . }}-service - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . 
}} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - type: {{ .Values.service.type }} - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics - selector: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/values.yaml deleted file mode 100644 index 6c9106bbcc5ce27fa872fa3e4d7e4529872eaff9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/deployment/values.yaml +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: Always -replicaCount: 1 -deployment: - image: null - numOfGPUs: 1 -service: - type: ClusterIP diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/Chart.yaml deleted file mode 100644 index 8d751daffc1cfdb0badd0b0774ea3025ca8bcb36..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: BART PyTriton Example Test -name: bart-pytorch-example-test -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/templates/job.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/templates/job.yaml deleted file mode 100644 index 8c4fb263bc6f09a54e3ff1151df6c0635c0003f0..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/templates/job.yaml +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: batch/v1 -kind: Job -metadata: - name: {{ .Release.Name }}-deployment - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - template: - metadata: - labels: - app: {{ template "selector.name" . 
}} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c", "/opt/app/client.py --url ${SERVICE_URL} --iterations 100 --verbose"] - env: - - name: SERVICE_URL - value: {{ .Values.serviceUrl }} - restartPolicy: {{ .Values.restartPolicy }} - backoffLimit: {{ .Values.backoffLimit }} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/values.yaml deleted file mode 100644 index 6a37227c8783a30033d28f8ca165d6296f0c192b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/kubernetes/test/values.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: OnFailure -backoffLimit: 4 -image: null -serviceUrl: "http://bart-pytorch-example-service:8000" \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/server.py b/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/server.py deleted file mode 100644 index 274697c244b094c798f7f6593d1fd8c374477afc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bart_pytorch/server.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Simple classifier example based on Hugging Face Pytorch BART model.""" -import argparse -import logging - -import numpy as np -from transformers import pipeline # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.huggingface_bart_pytorch.server") - -CLASSIFIER = pipeline("zero-shot-classification", model="facebook/bart-large-mnli", device=0) - -# Labels pre-cached on server side -LABELS = [ - "travel", - "cooking", - "dancing", - "sport", - "music", - "entertainment", - "festival", - "movie", - "literature", -] - - -@batch -def _infer_fn(sequence: np.ndarray): - sequence = np.char.decode(sequence.astype("bytes"), "utf-8") - sequence = sequence.tolist() - - classification_result = CLASSIFIER(sequence, LABELS) - result_labels = [] - for result in classification_result: - logger.debug(result) - most_probable_label = result["labels"][0] - result_labels.append([most_probable_label]) - - return {"label": np.char.encode(result_labels, "utf-8")} - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--max-batch-size", - type=int, - default=8, - help="Batch size of request.", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - with Triton() as triton: - logger.info("Loading BART model.") - triton.bind( - model_name="BART", - infer_func=_infer_fn, - inputs=[ - Tensor(name="sequence", dtype=bytes, shape=(1,)), - ], - outputs=[ - Tensor(name="label", dtype=bytes, shape=(1,)), - ], - config=ModelConfig(max_batch_size=args.max_batch_size), - strict=True, - ) - logger.info("Serving inference") - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/README.md b/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/README.md deleted file mode 100644 index 2f7ce698b645b5fba3424a555de89a84af080a98..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/README.md +++ /dev/null @@ -1,53 +0,0 @@ - - -# HuggingFace BERT JAX Model - -## Overview - -The example presents a HuggingFace BERT JAX model inference. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies for downloading model from HuggingFace and JAX library -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. 
Run the `client.py` to perform queries on model: - -```shell -./client.py -``` - diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/__init__.py b/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/client.py b/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/client.py deleted file mode 100644 index e5b4191307bb9071693916472729ab928605ba70..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/client.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for BERT classifier sample server.""" -import logging - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.huggingface_bert_jax.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -init_timeout_s = 600 # increase default timeout to let model download from HF hub -sequence = np.array([b"Hello, my dog is cute"]) - -logger.info(f"Input: {sequence}") -logger.info("Sending request") -with ModelClient("localhost", "BERT", init_timeout_s=init_timeout_s) as client: - result_dict = client.infer_sample(sequence) - - -for output_name, output_data in result_dict.items(): - output_data = np.array2string(output_data, max_line_width=np.inf, separator=",").replace("\n", "") - logger.info(f"{output_name}: {output_data}") diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/install.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/install.sh deleted file mode 100644 index 03be74725371b611ec7399f07bf23dae097de019..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/install.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -# 0.4.14 raises error -pip install --upgrade "jax[cuda12_pip]!=0.4.14" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html -# torch is required for checkpoint loading -pip install transformers flax torch \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/server.py b/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/server.py deleted file mode 100644 index 17ff7a954b69870f4551292c057f63ab29170604..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_bert_jax/server.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Simple classifier example based on Hugging Face JAX BERT model.""" -import logging - -import numpy as np -from transformers import BertTokenizer, FlaxBertModel # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.huggingface_bert_jax.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -tokenizer = BertTokenizer.from_pretrained("bert-base-uncased") -model = FlaxBertModel.from_pretrained("bert-base-uncased") - - -@batch -def _infer_fn(**inputs: np.ndarray): - (sequence_batch,) = inputs.values() - - # need to convert dtype=object to bytes first - # end decode unicode bytes - sequence_batch = np.char.decode(sequence_batch.astype("bytes"), "utf-8") - - last_hidden_states = [] - for sequence_item in sequence_batch: - tokenized_sequence = tokenizer(sequence_item.item(), return_tensors="jax") - results = model(**tokenized_sequence) - last_hidden_states.append(results.last_hidden_state) - last_hidden_states = np.array(last_hidden_states, dtype=np.float32) - return [last_hidden_states] - - -with Triton() as triton: - logger.info("Loading BERT model.") - triton.bind( - model_name="BERT", - infer_func=_infer_fn, - inputs=[ - Tensor(name="sequence", dtype=np.bytes_, shape=(1,)), - ], - outputs=[ - Tensor( - name="last_hidden_state", - dtype=np.float32, - shape=(-1, -1, -1), - ), - ], - config=ModelConfig(max_batch_size=16), - strict=True, - ) - logger.info("Serving inference") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/README.md b/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/README.md deleted file mode 100644 index a32ceb444a73665b4a39b754551f7e5234c5bd05..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/README.md +++ /dev/null @@ -1,137 +0,0 @@ - - -# HuggingFace BART PyTorch Model - -## Overview - -The example presents a HuggingFace BART PyTorch model inference. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies for downloading model from HuggingFace -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -And configurations: - -- `kubernetes` - example Helm Charts for serving and test inference in Kubernetes cluster - -## Running example locally - -To run example locally the `torch` package is required. It can be installed in your current environment using pip: - -```shell -pip install torch -``` - -Or you can use NVIDIA PyTorch container: -```shell -docker run -it --gpus 1 --shm-size 8gb -v {repository_path}:{repository_path} -w {repository_path} nvcr.io/nvidia/pytorch:23.10-py3 bash -``` - -If you select to use container we recommend to install -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -Follow the step-by-step guide to execute the example: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. 
Run the `client.py` to perform queries on model: - -```shell -./client.py -``` - -## Running example on Kubernetes cluster - -The following prerequisites must be matched to run the example: - -- Kubernetes cluster with NVIDIA GPU node -- [NVIDIA Device Plugin](https://github.com/NVIDIA/k8s-device-plugin) installed in Kubernetes cluster -- Docker Containers Registry accessible from Kubernetes cluster -- [Installed Helm](https://helm.sh/docs/intro/install/) for creating the deployment and test job - -Optionally you may install NVIDIA Container Toolkit and NVIDIA GPU Operator which enable more features -like [MIG](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-operator-mig.html) or -[Time Slicing](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-sharing.html) support in the cluster. -To learn more how to set up Kubernetes cluster with NVIDIA GPU you can review [ -NVIDIA Cloud Native Documentation](https://docs.nvidia.com/datacenter/cloud-native/contents.html) - -Below, we present a step-by-step guide assuming that **all the commands are executed from the root of repository**. - -Follow these steps to run and test example in the cluster: -1. [Optional] Build PyTriton wheel following the [build instruction](../../docs/building.md) -2. Prepare the tag under which Docker image is going to be pushed to your Docker Containers Registry accessible from Kubernetes -cluster. Example for local cluster (minikube, k3s) with registry hosted inside the cluster: -```shell -export DOCKER_IMAGE_NAME_WITH_TAG=localhost:5000/bart-pytorch-example:latest -``` -3. Build and push the Docker image to your registry: - -```shell -# Export the base image used for build -export FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3 -./examples/huggingface_bart_pytorch/kubernetes/build_and_push.sh -``` -**Note**: By default the container is built using `pytriton` package from `GitHub`. To build container with wheel built -locally use `export BUILD_FROM=dist` before executing script. - -4. Install the Helm Chart with deployment and service: - -```shell -helm upgrade -i --set deployment.image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -bart-pytorch-example \ -./examples/huggingface_bart_pytorch/kubernetes/deployment -``` - -5. Install the Helm Chart with client test - -```shell -helm install --set image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -bart-pytorch-example-test \ -./examples/huggingface_bart_pytorch/kubernetes/test -``` - -Now, you can review the logs from the running PODs to verify the inference is running. To show the logs from cluster -for given POD first list all running pods: -```shell -kubectl get pods -``` - -Next show logs from server or client: -```shell -kubectl logs {NAME} -``` - -To remove the installed charts simply run: -```shell -helm uninstall bart-pytorch-example-test -helm uninstall bart-pytorch-example -``` \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/client.py b/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/client.py deleted file mode 100644 index 82091bfc640520cdb603b1e2dcb74759a6125a11..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/client.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for HF microsoft/DialoGPT sample server.""" -import argparse -import logging - -import numpy as np - -from pytriton.client import DecoupledModelClient - -_LOGGER = logging.getLogger("examples.huggingface_dialogpt_pytroch_streaming.client") - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--url", - default="grpc://localhost:8001", - help=( - "Url to Triton server (ex. grpc://localhost:8001)." "GRPC protocol is only supported with decoupled models" - ), - required=False, - ) - parser.add_argument("--model-name", default="DialoGPT-small", help="Name of the model", required=False) - parser.add_argument( - "--init-timeout-s", - type=float, - default=600.0, - help="Server and model ready state timeout in seconds", - required=False, - ) - parser.add_argument( - "--iterations", - type=int, - default=8, - help="Number of requests per client.", - required=False, - ) - parser.add_argument( - "--interactive", - action="store_true", - default=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - if args.interactive: - first_input = (input("TYPE AND PRESS ENTER:")).encode("utf-8") - else: - first_input = b"Does money buy happines?" 
- - chat_history_items = [first_input] # initial prompt - with DecoupledModelClient(args.url, args.model_name, init_timeout_s=args.init_timeout_s) as client: - chat_history = b"" - print("(0) >", chat_history_items[0].decode("utf-8")) # noqa: T201 - - def idx_generator(): - if args.interactive: - i = 1 - while True: - yield i - i += 1 - else: - yield from range(1, args.iterations) - - chat_history = chat_history_items[0] - for idx in idx_generator(): - if idx > 0: - print(f"({idx}) > ", end="", flush=True) # noqa: T201 - for partial_result_dict in client.infer_sample( - new_inputs=np.array(chat_history_items[-1:]), chat_history=np.array([chat_history]) - ): - response_tokens = partial_result_dict["response"].tolist() # pytype: disable=unsupported-operands - chat_history_items.extend(response_tokens) # noqa: T201 - - response_tokens = "".join(token.decode("utf-8") for token in response_tokens) - print(response_tokens, end="", flush=True) # noqa: T201 - print("") # noqa: T201 - if args.interactive: - next_input = (input("TYPE AND PRESS ENTER:")).encode("utf-8") - chat_history_items.append(next_input) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/install.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/install.sh deleted file mode 100644 index 7bd7e1bce78dad247b04b6c8f9a79bc5d2c9e0e4..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -pip install transformers diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/server.py b/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/server.py deleted file mode 100644 index b734d451cab247a9750ae957fb70a005e9eeacf8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_dialogpt_streaming_pytorch/server.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Simple classifier example based on HF microsoft/DialoGPT model.""" -import argparse -import concurrent -import logging -import pathlib -import queue -import typing - -import numpy as np -from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer # pytype: disable=import-error - -from pytriton.decorators import batch # pytype: disable=import-error -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -logger = logging.getLogger("examples.huggingface_dialogpt_pytroch_streaming.server") - - -class StreamingBot: - def __init__(self, model_name: str, *, max_length: int = 1000, timeout_s: typing.Optional[float] = None) -> None: - self.model = AutoModelForCausalLM.from_pretrained(model_name) - self.tokenizer = AutoTokenizer.from_pretrained(model_name) - self.tokenizer.pad_token = self.tokenizer.eos_token - - self._timeout_s = timeout_s - self._max_length = max_length - - self.streamer = TextIteratorStreamer( - self.tokenizer, timeout=self._timeout_s, skip_prompt=True, skip_special_tokens=True - ) - self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1, thread_name_prefix="streaming_bot") - - @batch - def __call__(self, new_inputs, chat_history=None) -> typing.Any: - inputs_kwargs = self._prepare_inputs(new_inputs, chat_history) # inputs_ids + attention_mask named args - generate_kwargs = dict( - **inputs_kwargs, - streamer=self.streamer, - max_length=self._max_length, - pad_token_id=self.tokenizer.pad_token_id, - ) - generate_future = self._executor.submit(self.model.generate, **generate_kwargs) - - try: - for token in self.streamer: - yield { - "response": np.char.encode([token], "utf-8")[np.newaxis, ...] - } # add batch dimension to match declared signature - except queue.Empty: - generate_future.cancel() - raise TimeoutError(f"Timeout occurred during model generation (timeout_s={self._timeout_s}).") - - generate_future.result() # raise exception if any occurred in model.generation method - - def _prepare_inputs(self, new_inputs, chat_history): - new_inputs = np.char.decode(new_inputs.astype("bytes"), "utf-8") - bot_inputs = np.char.add(new_inputs, self.tokenizer.eos_token) - - if chat_history: - chat_history = np.char.decode(chat_history.astype("bytes"), "utf-8") - bot_inputs = np.char.add(chat_history, bot_inputs) - - bot_inputs = bot_inputs[..., 0].tolist() # reduce to 1D list - model_kwargs = self.tokenizer.batch_encode_plus(bot_inputs, return_tensors="pt", padding=True) - return model_kwargs - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("--max-batch-size", type=int, default=8, help="Batch size of request.", required=False) - parser.add_argument("--model-name", default="microsoft/DialoGPT-small", help="Name of the model", required=False) - parser.add_argument("--verbose", action="store_true", default=False) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - verbose_level = int(args.verbose) * 3 - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - streaming_bot = StreamingBot(args.model_name, timeout_s=10.0) - - with Triton(config=TritonConfig(log_verbose=verbose_level)) as triton: - triton.bind( - model_name=pathlib.Path(args.model_name).stem, - infer_func=streaming_bot, - inputs=[ - Tensor(name="new_inputs", dtype=bytes, shape=(1,)), - Tensor(name="chat_history", optional=True, dtype=bytes, shape=(1,)), - ], - outputs=[ - Tensor(name="response", 
dtype=bytes, shape=(1,)), - ], - config=ModelConfig(decoupled=True), - strict=True, - ) - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/Dockerfile b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/Dockerfile deleted file mode 100644 index e13fd48ee3370f71794055f0984505c55a7611a9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/Dockerfile +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -ARG FROM_IMAGE_NAME=nvcr.io/nvidia/tensorflow:23.10-tf2-py3 -FROM ${FROM_IMAGE_NAME} - -ENV XLA_PYTHON_CLIENT_PREALLOCATE=false -ENV NCCL_LAUNCH_MODE="PARALLEL" - -WORKDIR /workdir - -COPY install.sh . -RUN ./install.sh - -COPY . . diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/README.md b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/README.md deleted file mode 100644 index c8b7e8c0ec6592f49eb2858004f6bbc1650bdf04..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/README.md +++ /dev/null @@ -1,275 +0,0 @@ - - -# Huggingface OPT JAX Multi-node Deployment - -This example shows how to easily deploy JAX large language models in a multi-node environment using PyTriton. In this -tutorial we will be working with [HuggingFace OPT](https://huggingface.co/docs/transformers/model_doc/opt) with up to -530B parameters. - -## Overview - -To run JAX in multi-GPU and/or multi-node environment we are -using [jax.distributed](https://jax.readthedocs.io/en/latest/_autosummary/jax.distributed.initialize.html#jax.distributed.initialize) -and [jax.experimental.pjit](https://jax.readthedocs.io/en/latest/_modules/jax/experimental/pjit.html) modules. To learn -more about using `pjit` and `jax.distributed` for running multi-node models please visit JAX docs. - -Example consists of following scripts: - -- [server.py](server.py) - this file runs the Triton server (with `--rank 0`) or JAX worker (with `--host_idx` - greater than 0) on each node. It contains the code that distributes the inputs from the server to the workers. -- [client.py](client.py) - example of a simple client that calls the server with a single sample. -- [opt_utils.py](opt_utils.py) - lower level code used by [server.py](server.py). In this file we define functions that - create a sharding strategy, copy model parameters from the cpu into multiple devices and run inference. -- [modeling_flax_opt.py](modeling_flax_opt.py) - slightly - modified [HuggingFace file](https://github.com/huggingface/transformers/blob/main/src/transformers/models/opt/modeling_flax_opt.py) - with OPT model definition. The main difference is that in the HuggingFace repository the model is initialized with FP32 - weights even when the operations are in FP16. 
In our file we use FP16 for both storing parameters and performing - operations. - -And the following configurations: - -- `kubernetes` - example Helm Charts for serving and testing inference in a Kubernetes cluster - -Below you can find a list of available models: - -| model name | pretrained | source | -|-------------------|------------|---------------------------------------------------------| -| facebook/opt-125m | True | [HuggingFace](https://huggingface.co/facebook/opt-125m) | -| facebook/opt-350m | True | [HuggingFace](https://huggingface.co/facebook/opt-350m) | -| facebook/opt-1.3b | True | [HuggingFace](https://huggingface.co/facebook/opt-1.3b) | -| facebook/opt-2.7b | True | [HuggingFace](https://huggingface.co/facebook/opt-2.7b) | -| facebook/opt-6.7b | True | [HuggingFace](https://huggingface.co/facebook/opt-6.7b) | -| facebook/opt-13b | True | [HuggingFace](https://huggingface.co/facebook/opt-13b) | -| facebook/opt-30b | True | [HuggingFace](https://huggingface.co/facebook/opt-30b) | -| facebook/opt-66b | True | [HuggingFace](https://huggingface.co/facebook/opt-66b) | -| random/125M | False | | -| random/350M | False | | -| random/1.3B | False | | -| random/2.7B | False | | -| random/5B | False | | -| random/6.7B | False | | -| random/13B | False | | -| random/20B | False | | -| random/30B | False | | -| random/66B | False | | -| random/89B | False | | -| random/17B | False | | -| random/310B | False | | -| random/530B | False | | - -## Running the example locally - -In this section we describe running JAX in a multi-GPU and/or multi-node environment where manual setup of the environment -is done on each node. - -### Prerequisites - -Each node must meet the following requirements: - -- [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). -- [NVIDIA Driver](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html) based on - the chosen version of the framework container - -### Building the Docker image - -The easiest way of running this example is inside an [nvcr.io](https://catalog.ngc.nvidia.com/containers) TensorFlow2 -container. An example `Dockerfile` that can be used to run the server: - -```Dockerfile -ARG FROM_IMAGE_NAME=nvcr.io/nvidia/tensorflow:23.10-tf2-py3 -FROM ${FROM_IMAGE_NAME} - -ENV XLA_PYTHON_CLIENT_PREALLOCATE=false -ENV NCCL_LAUNCH_MODE="PARALLEL" - -WORKDIR /workdir - -COPY install.sh . -RUN ./install.sh - -COPY . . -``` - -On each node we have to build the image (or download it from a registry). - -```bash -docker build -t jax-llm:latest . -``` - -### Serving the model - -On each node run: - -```bash -docker run --net host --rm --gpus all jax-llm python server.py \ - --head-url "<head node address>:<port>" \ - --number-of-nodes <number of nodes> \ - --rank <node rank> \ - --model-name <model name> \ - --number-of-gpus <number of GPUs per node> -``` - -The server expects two inputs: - -- `input` - string array of shape (`batch_size`, 1), -- `output_length` - int64 array of shape (`batch_size`, 1). - -It returns a single output: - -- `output` - string array of shape (`batch_size`, 1). - -To read more about the Triton Inference Server please -visit the [Triton docs](https://github.com/triton-inference-server/server#documentation). - -### Testing the inference - -To use our example client, run on any machine: - -```bash -docker run --net host jax-llm python client.py \ - --server-url "http://<server address>:8000" \ - --input "<input text>" \ - --output-length <output length> -```
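If a machine with `nvidia-pytriton` installed can reach the server directly, the same request can also be sent from Python, along the lines of [client.py](client.py). A minimal sketch (the address, prompt, and output length are placeholders; the model is bound under the name `OPT`):

```python
import numpy as np

from pytriton.client import ModelClient

# The server from this example binds the model under the name "OPT" (see client.py).
with ModelClient("http://localhost:8000", model_name="OPT") as client:
    inputs = np.char.encode(np.array(["Once upon a time"]), "utf-8")  # placeholder prompt
    output_length = np.array([16], dtype=np.int64)
    result = client.infer_sample(inputs, output_length)
    print([b.decode("utf-8") for b in result["output"].tolist()])
```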
- -## Kubernetes example of running server on single/multiple nodes with multiple GPUs - -This section describes how to serve the JAX model on a Kubernetes cluster. The following prerequisites must be met to -run the example: - -- A Kubernetes cluster with an NVIDIA GPU node -- [NVIDIA Device Plugin](https://github.com/NVIDIA/k8s-device-plugin) installed in the Kubernetes cluster -- A Docker Containers Registry accessible from the Kubernetes cluster -- [Installed Helm](https://helm.sh/docs/intro/install/) for creating the deployment and test job - -Optionally, you may install the NVIDIA Container Toolkit and NVIDIA GPU Operator, which enable more features -like [MIG](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-operator-mig.html) or -[Time Slicing](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-sharing.html) support in the cluster. -To learn more about how to set up a Kubernetes cluster with NVIDIA GPUs you can review the -[NVIDIA Cloud Native Documentation](https://docs.nvidia.com/datacenter/cloud-native/contents.html). - -### Deployment instructions - -Below, we present a step-by-step guide assuming that **all the commands are executed from the root of the repository**. - -Follow these steps to run and test the example in the cluster: - -1. [Optional] Build the PyTriton wheel following the [build instructions](../../docs/building.md) -2. Prepare the tag under which the image is going to be pushed to your Docker Containers Registry accessible from the Kubernetes - cluster. Example for a local cluster (minikube, k3s) with a registry hosted inside the cluster: - -```shell -export DOCKER_IMAGE_NAME_WITH_TAG=localhost:5000/jax-example:latest -``` - -3. Build and push the Docker container image to your registry: - -```shell -# Export the base image used for the build. We use the TensorFlow image for JAX -export FROM_IMAGE_NAME=nvcr.io/nvidia/tensorflow:23.10-tf2-py3 -./examples/huggingface_opt_multinode_jax/kubernetes/build_and_push.sh -``` -**Note**: By default the container is built using the `pytriton` package from pypi.org. To build the container with a locally -built wheel, use `export BUILD_FROM=dist` before executing the script. - -4. At this point there are two options to deploy the model, depending on the size of the model: - a) Install the Helm Chart with deployment and service for single-node: - -```shell -helm upgrade -i --set deployment.image=${DOCKER_IMAGE_NAME_WITH_TAG} \ ---set deployment.numOfGPUs=1 \ -jax-example \ -./examples/huggingface_opt_multinode_jax/kubernetes/single-node -``` - -b) Install the Helm Chart with deployment and service for multi-node: - -**Important**: Running multi-node requires creating a Persistent Volume Claim in the cluster, shared between PODs. You can -pass its name as an argument to the Helm Chart during installation. Read more about how to create a -[Persistent Volume Claim](#creating-persistent-volume-claim). - -**Please note**: The multi-node deployment for scaling requires improved configuration of services and load balancing. - -```shell -helm upgrade -i --set statefulset.image=${DOCKER_IMAGE_NAME_WITH_TAG} \ ---set statefulset.persistentVolumeClaim=llm-cache-pvc \ ---set statefulset.numOfNodes=3 \ ---set statefulset.numOfGPUs=1 \ -jax-example \ -./examples/huggingface_opt_multinode_jax/kubernetes/multi-node -``` - -5. Install the Helm Chart with the client test: - -```shell -helm install --set image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -jax-example-test \ -./examples/huggingface_opt_multinode_jax/kubernetes/test -``` - -Now, you can review the logs from the running PODs to verify the inference is running.
To show the logs from the cluster -for a given POD, first list all running pods: - -```shell -kubectl get pods -``` - -Next, show the logs from the server or the client: - -```shell -kubectl logs {NAME} -``` - -To remove the installed charts, simply run: - -```shell -helm uninstall jax-example-test -helm uninstall jax-example -``` - -### Creating Persistent Volume Claim - -This section describes how to create a Persistent Volume Claim in a Kubernetes cluster using a CSI or NFS drive. - -#### Using CSI host path - -When you are running on a local machine (e.g. Minikube or k3s) you can use a CSI host path to create a persistent volume -claim. Make sure that the appropriate extension for your cluster has been installed and run: - -```shell -kubectl apply -f ./examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-csi.yaml -``` - -#### Using NFS disk - -When you are running a Kubernetes cluster at a Cloud Service Provider you can create a persistent volume claim using an NFS disk. - -First, create the NFS disk and obtain its IP address. Make sure the disk is in the same network as the Kubernetes cluster. -The pre-defined file share name for the NFS storage is `llm`. - -Next, modify the `./examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-nfs.yaml` file and update the -`{IP}` value. Then run: - -```shell -kubectl apply -f ./examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-nfs.yaml -``` - -Once the persistent volume is ready, the claim can be created using: -```shell -kubectl apply -f ./examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-nfs.yaml -``` \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/client.py b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/client.py deleted file mode 100644 index 63433dad61474b30a42e05e0bf08e8f0505bb3fb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/client.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-import argparse -import logging - -import numpy as np - -from pytriton.client import ModelClient - -TRITON_MODEL_NAME = "OPT" - -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") -LOGGER = logging.getLogger("jax.client") -LOGGER.setLevel(level=logging.INFO) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("--server-url", type=str, default="http://localhost:8000", help="server address") - parser.add_argument("--input", type=str, required=True, help="input text") - parser.add_argument("--output-length", type=int, required=True, help="output length") - args = parser.parse_args() - - np.random.seed(0) - - output_len = np.array([args.output_length], dtype=np.int64) - inputs = np.array([args.input]) - inputs = np.char.encode(inputs, "utf-8") - - LOGGER.info(f"output_len.shape={output_len.shape}, inputs.shape={inputs.shape}") - - LOGGER.info(f"Initializing client to address {args.server_url}") - with ModelClient(args.server_url, model_name=TRITON_MODEL_NAME) as client: - LOGGER.info("Sending request") - LOGGER.info(f" Inputs: {inputs}") - LOGGER.info(f" Output length: {output_len}") - result_dict = client.infer_sample(inputs, output_len) - - LOGGER.info("Received results:") - for output_name, output_data in result_dict.items(): - LOGGER.info(f"{output_name}: {[b.decode() for b in output_data.tolist()]}") - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/install.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/install.sh deleted file mode 100644 index 87a2f445f2d4d60f46d802b2812e8c0210b33e95..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/install.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -# Use 0.4.14 raises error -pip install --upgrade "jax[cuda12_pip]!=0.4.14" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html -pip install --upgrade flax omegaconf sacrebleu SentencePiece tokenizers "transformers>=4.26" diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/Dockerfile b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/Dockerfile deleted file mode 100644 index 3561e60ee9206b86e080c9d639a08ba47028f84d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/Dockerfile +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -ARG FROM_IMAGE_NAME=nvcr.io/nvidia/tensorflow:23.10-tf2-py3 -ARG BUILD_FROM=pypi - -FROM ${FROM_IMAGE_NAME} as base -WORKDIR /opt/app - -# Use when build PyTriton from source -FROM base as install-from-dist -COPY dist/*.whl /opt/app -RUN pip install /opt/app/*.whl - -# Install from pypi -FROM base as install-from-pypi -RUN pip install -U nvidia-pytriton - -FROM install-from-${BUILD_FROM} AS image - -ENV XLA_PYTHON_CLIENT_PREALLOCATE=false -ENV NCCL_LAUNCH_MODE="PARALLEL" -ENV PYTHONUNBUFFERED=1 - -RUN apt update -y && apt install -y dnsutils - -COPY examples/huggingface_opt_multinode_jax/install.sh /opt/app -RUN /opt/app/install.sh - -COPY examples/huggingface_opt_multinode_jax/client.py /opt/app -COPY examples/huggingface_opt_multinode_jax/server.py /opt/app -COPY examples/huggingface_opt_multinode_jax/modeling_flax_opt.py /opt/app -COPY examples/huggingface_opt_multinode_jax/opt_utils.py /opt/app -COPY examples/huggingface_opt_multinode_jax/kubernetes/run.sh /opt/app -COPY examples/huggingface_opt_multinode_jax/kubernetes/health.sh /opt/app - -ENTRYPOINT [] diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/build_and_push.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/build_and_push.sh deleted file mode 100644 index 9940bef73efe98b853bbdc886632ef7b9a69de61..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/build_and_push.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -if [ -z ${DOCKER_IMAGE_NAME_WITH_TAG} ]; then - echo "Provide Docker image name under to push the created image to your registry" - echo "Example:" - echo " export DOCKER_IMAGE_NAME_WITH_TAG=my-registry:5000/jax-example:latest" - exit 1 -fi - -if [ -z ${FROM_IMAGE_NAME} ]; then - echo "Provide Docker image that would be used as base image" - echo "Example:" - echo " export FROM_IMAGE_NAME=nvcr.io/nvidia/jax:22.07" - exit 1 -fi - -BUILD_FROM="${BUILD_FROM:-pypi}" -if [[ ${BUILD_FROM} != "pypi" ]] && [[ ${BUILD_FROM} != "dist" ]]; then - echo "The BUILD_FROM variable should be equal to 'pypi' or 'dist'" - echo "Example:" - echo " export BUILD_FROM=dist" - exit 1 -fi - -set -xe - -docker build -f examples/huggingface_opt_multinode_jax/kubernetes/Dockerfile \ - -t ${DOCKER_IMAGE_NAME_WITH_TAG} \ - --build-arg FROM_IMAGE_NAME=${FROM_IMAGE_NAME} \ - --build-arg BUILD_FROM=${BUILD_FROM} . 
-docker push ${DOCKER_IMAGE_NAME_WITH_TAG} \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/health.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/health.sh deleted file mode 100644 index a70847b57821035ec7c3e8da7dac52478dc803c7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/health.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#!/bin/bash -set -xe - -RANK=${HOSTNAME##*-} - -if [[ "${RANK}" == "0" ]]; -then - # For head node - validate if Triton Server is running - curl --fail localhost:8000/v2/health/live -else - # For workers - validate the process is running - cat /tmp/health -fi \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/Chart.yaml deleted file mode 100644 index 81960fe23da635c920086b2e429e0aec29b1825c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: JAX Multi Node Example -name: jax-example -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/headless.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/headless.yaml deleted file mode 100644 index cd1ce000bfc296a3162332ce13642d65f4fc1fe9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/headless.yaml +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - selector: - app: {{ template "selector.name" . 
}} - release: {{ .Release.Name }} - clusterIP: None - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics - - port: {{ .Values.jaxPort }} - name: jax - - port: {{ .Values.socketPort }} - name: socket - - diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/service.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/service.yaml deleted file mode 100644 index b51eb5eeb153b4b0fcc6571fc06cb2751d858845..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/service.yaml +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . }}-service - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - type: {{ .Values.service.type }} - selector: - statefulset.kubernetes.io/pod-name: {{ template "selector.name" . }}-0 - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/statefulset.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/statefulset.yaml deleted file mode 100644 index 385068883e49b689732553f12c86f68f8182065d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/statefulset.yaml +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: StatefulSet -metadata: - name: {{ template "selector.fullname" . }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - selector: - matchLabels: - app: {{ template "selector.name" . 
}} - release: {{ .Release.Name }} - serviceName: "{{ template "selector.fullname" . }}" - replicas: {{ mul .Values.replicaCount .Values.statefulset.numOfNodes }} - minReadySeconds: 5 - template: - metadata: - labels: - app: {{ template "selector.name" . }} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - terminationGracePeriodSeconds: 10 - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - restartPolicy: {{ .Values.restartPolicy }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.statefulset.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c"] - args: ["/opt/app/run.sh"] - env: - - name: PYTHONUNBUFFERED - value: "1" - - name: MODEL_ID - value: {{ .Values.statefulset.modelId }} - - name: PVC_CACHE - value: /mnt/data - - name: NUMBER_OF_NODES - value: "{{ .Values.statefulset.numOfNodes }}" - - name: NUMBER_OF_GPUS - value: "{{ .Values.statefulset.numOfGPUs }}" - - name: POD_NAME - value: {{ template "selector.name" . }} - - name: CLUSTER_NAME - value: {{ .Values.statefulset.clusterName }} - - name: DELAY - value: "15" - - name: PORT - value: "{{ .Values.jaxPort }}" - - name: SOCKET_PORT - value: "{{ .Values.socketPort }}" - ports: - - containerPort: 8000 - name: http - - containerPort: 8001 - name: grpc - - containerPort: 8002 - name: metrics - - containerPort: {{ .Values.jaxPort }} - name: jax - - containerPort: {{ .Values.socketPort }} - name: socket - livenessProbe: - exec: - command: - - bash - - /opt/app/health.sh - initialDelaySeconds: {{ .Values.statefulset.initialDelaySeconds }} - periodSeconds: 10 - resources: - requests: - nvidia.com/gpu: {{ .Values.statefulset.numOfGPUs }} - limits: - nvidia.com/gpu: {{ .Values.statefulset.numOfGPUs }} - volumeMounts: - - name: pvc - mountPath: /mnt/data - - name: shared-memory - mountPath: /dev/shm - volumes: - - name: pvc - persistentVolumeClaim: - claimName: {{ .Values.statefulset.persistentVolumeClaim }} - readOnly: false - - name: shared-memory - emptyDir: - medium: Memory diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/values.yaml deleted file mode 100644 index 6ff8bb6bfafdb8c5f80e83c20b571dc215db1ae2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/multi-node/values.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-imagePullSecret: null -pullPolicy: Always -restartPolicy: Always -replicaCount: 1 -jaxPort: 12345 -socketPort: 65432 -statefulset: - image: null - numOfGPUs: 1 - numOfNodes: 3 - persistentVolumeClaim: llm-cache-pvc - modelId: facebook/opt-1.3b - clusterName: default.svc.cluster.local - initialDelaySeconds: 180 -service: - type: LoadBalancer diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-csi.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-csi.yaml deleted file mode 100644 index e91d11d54c7db37b0d0bee40cf17b9f25871c97c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-csi.yaml +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: llm-cache-pvc -spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 50Gi - storageClassName: csi-hostpath-sc diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-nfs.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-nfs.yaml deleted file mode 100644 index dac2c59823f2feebefa7d73aecc3fa5aee023313..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-nfs.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: llm-cache-pvc -spec: - accessModes: - - ReadWriteMany - storageClassName: "" - volumeName: llm-cache - resources: - requests: - storage: 2.5T \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-nfs.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-nfs.yaml deleted file mode 100644 index 75db422137a72fbb0519c9b62e481a670a3dd97e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-nfs.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: PersistentVolume -metadata: - name: llm-cache -spec: - capacity: - storage: 2.5T - accessModes: - - ReadWriteMany - nfs: - path: /llm - server: {IP} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/run.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/run.sh deleted file mode 100644 index 0b1fe59892a9e1c2faf64da468724cd35e80aa62..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/run.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#!/bin/bash -set -xe - -HEALTH_FILE=/tmp/health - -# Define cleanup method -function cleanup() -{ - rm -f ${HEALTH_FILE} -} - -# Create health check file -touch ${HEALTH_FILE} - -# Clean file on script exit -trap cleanup SIGINT SIGTERM ERR EXIT - -# Initial delay to mark POD as health -sleep ${DELAY} - -# Initialize head node information -if [ -z ${POD_NAME} ]; -then - RANK=0 - ADDRESS=localhost -else - POD_ID=${HOSTNAME##*-} - RANK=$((${POD_ID} % ${NUMBER_OF_NODES})) - HEAD_RANK=$((${POD_ID} / ${NUMBER_OF_NODES} * ${NUMBER_OF_NODES})) - ADDRESS=$(dig +short ${POD_NAME}-${HEAD_RANK}.${POD_NAME}.${CLUSTER_NAME}) -fi - -# Display node info and head address -echo "RANK: ${RANK}" -echo "HEAD ADDRESS: ${ADDRESS}" - -# Append cache flags -if [ -n "${PVC_CACHE}" ]; -then -echo "Initializing cache in shared volume ${PVC_CACHE}" -export CACHE_OPTIONS="--cache-dir /mnt/data/cache" -fi - -python /opt/app/server.py --head-url ${ADDRESS}:${PORT} \ ---number-of-nodes ${NUMBER_OF_NODES} \ ---rank ${RANK} \ ---model-name ${MODEL_ID} \ ---number-of-gpus ${NUMBER_OF_GPUS} \ ---socket-port ${SOCKET_PORT} \ ---verbose ${CACHE_OPTIONS} \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/Chart.yaml deleted file mode 100644 index 39aea54bcee6709a62bf3aaaa04d8a3860da503b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: JAX Single Node Example -name: jax-example -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/deployment.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/deployment.yaml deleted file mode 100644 index b5aff0bd78501f59890836888ee00bb1bc533b54..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/deployment.yaml +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ .Release.Name }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - replicas: {{ .Values.replicaCount }} - selector: - matchLabels: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} - template: - metadata: - labels: - app: {{ template "selector.name" . 
}} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - restartPolicy: {{ .Values.restartPolicy }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.deployment.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c"] - args: ["/opt/app/run.sh"] - env: - - name: PYTHONUNBUFFERED - value: "1" - - name: MODEL_ID - value: {{ .Values.deployment.modelId }} - - name: NUMBER_OF_NODES - value: "1" - - name: NUMBER_OF_GPUS - value: "{{ .Values.deployment.numOfGPUs }}" - - name: DELAY - value: "0" - - name: PORT - value: "{{ .Values.jaxPort }}" - - name: SOCKET_PORT - value: "{{ .Values.socketPort }}" - ports: - - containerPort: 8000 - name: http - - containerPort: 8001 - name: grpc - - containerPort: 8002 - name: metrics - livenessProbe: - initialDelaySeconds: 60 - periodSeconds: 10 - httpGet: - path: /v2/health/live - port: http - readinessProbe: - initialDelaySeconds: {{ .Values.deployment.initialDelaySeconds }} - periodSeconds: 10 - httpGet: - path: /v2/health/ready - port: http - resources: - requests: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - limits: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - volumeMounts: - - name: shared-memory - mountPath: /dev/shm - volumes: - - name: shared-memory - emptyDir: - medium: Memory diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/service.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/service.yaml deleted file mode 100644 index 79d7c7f62cd8d714edbd6aec8721d7691007d7c8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/service.yaml +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . }}-service - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - type: {{ .Values.service.type }} - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics - selector: - app: {{ template "selector.name" . 
}} - release: {{ .Release.Name }} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/values.yaml deleted file mode 100644 index 0106b110956707a4ccbd4b8a5c79dec650557ac2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/single-node/values.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: Always -replicaCount: 1 -jaxPort: 12345 -socketPort: 65432 -deployment: - image: null - numOfGPUs: 1 - modelId: facebook/opt-1.3b - initialDelaySeconds: 180 -service: - type: LoadBalancer diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/Chart.yaml deleted file mode 100644 index bb45bf737dc5a360790473f3f7f7a2b5dfc8b815..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: JAX Example Test -name: jax-example-test -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/templates/job.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/templates/job.yaml deleted file mode 100644 index e22e947854eaca5eb359ec52a67a8afb90574d03..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/templates/job.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: batch/v1 -kind: Job -metadata: - name: {{ .Release.Name }}-deployment - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - template: - metadata: - labels: - app: {{ template "selector.name" . 
}} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: [ "bash", "-c" ] - args: [ "while true; do python /opt/app/client.py --server-url ${SERVICE_URL} --input 'this is test' --output-length 16; sleep 1; done;" ] - env: - - name: PYTHONUNBUFFERED - value: "1" - - name: SERVICE_URL - value: {{ .Values.serviceUrl }} - restartPolicy: {{ .Values.restartPolicy }} - backoffLimit: {{ .Values.backoffLimit }} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/values.yaml deleted file mode 100644 index 3e11d8f3f25ab365633d7fb489ac61c0139a147e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/kubernetes/test/values.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: OnFailure -backoffLimit: 4 -image: null -serviceUrl: "http://jax-example-service:8000" \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/modeling_flax_opt.py b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/modeling_flax_opt.py deleted file mode 100644 index 7ef1acd57161c99b036bde54815d894967d8f399..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/modeling_flax_opt.py +++ /dev/null @@ -1,815 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# Copyright 2022-2023 The Fairseq Authors and The Google Flax Team Authors And The HuggingFace Inc. team. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" Flax OPT model.""" - -# pytype: disable=import-error,annotation-type-mismatch,bad-return-type - -import logging -from functools import partial -from typing import Optional, Tuple - -import flax.linen as nn -import jax -import jax.numpy as jnp -from flax.core.frozen_dict import FrozenDict, freeze, unfreeze -from flax.linen import combine_masks, make_causal_mask -from flax.linen.attention import dot_product_attention_weights -from flax.traverse_util import flatten_dict, unflatten_dict -from jax import lax -from jax.random import PRNGKey -from transformers.modeling_flax_outputs import FlaxBaseModelOutput, FlaxMaskedLMOutput -from transformers.modeling_flax_utils import ACT2FN, FlaxPreTrainedModel, append_call_sample_docstring -from transformers.models.opt.configuration_opt import OPTConfig -from transformers.utils import add_start_docstrings - -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") -LOGGER = logging.getLogger("jax.modeling_flax_opt") -LOGGER.setLevel(level=logging.INFO) - -_CHECKPOINT_FOR_DOC = "facebook/opt-350m" -_CONFIG_FOR_DOC = "OPTConfig" - - -OPT_START_DOCSTRING = r""" - This model inherits from [`FlaxPreTrainedModel`]. Check the superclass documentation for the generic methods the - library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads - etc.) - - This model is also a Flax Linen - [flax.nn.Module](https://flax.readthedocs.io/en/latest/_autosummary/flax.nn.module.html) subclass. Use it as a - regular Flax Module and refer to the Flax documentation for all matter related to general usage and behavior. - - Finally, this model supports inherent JAX features such as: - - - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit) - - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation) - - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap) - - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap) - - Parameters: - config ([`OPTConfig`]): Model configuration class with all the parameters of the model. - Initializing with a config file does not load the weights associated with the model, only the - configuration. Check out the [`~FlaxPreTrainedModel.from_pretrained`] method to load the model weights. - dtype (`jax.numpy.dtype`, *optional*, defaults to `jax.numpy.float32`): - The data type of the computation. Can be one of `jax.numpy.float32`, `jax.numpy.float16` (on GPUs) and - `jax.numpy.bfloat16` (on TPUs). - - This can be used to enable mixed-precision training or half-precision inference on GPUs or TPUs. If - specified all the computation will be performed with the given `dtype`. - - **Note that this only specifies the dtype of the computation and does not influence the dtype of model - parameters.** - - If you wish to change the dtype of the model parameters, see [`~FlaxPreTrainedModel.to_fp16`] and - [`~FlaxPreTrainedModel.to_bf16`]. -""" - -OPT_INPUTS_DOCSTRING = r""" - Args: - input_ids (`jnp.ndarray` of shape `(batch_size, sequence_length)`): - Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide - it. - - Indices can be obtained using [`GPT2Tokenizer`]. See [`PreTrainedTokenizer.encode`] and - [`PreTrainedTokenizer.__call__`] for details. 
- - [What are input IDs?](../glossary#input-ids) - attention_mask (`jnp.ndarray` of shape `(batch_size, sequence_length)`, *optional*): - Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - - - 1 for tokens that are **not masked**, - - 0 for tokens that are **masked**. - - [What are attention masks?](../glossary#attention-mask) - position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*): - Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, - config.max_position_embeddings - 1]`. - output_attentions (`bool`, *optional*): - Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned - tensors for more detail. - output_hidden_states (`bool`, *optional*): - Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for - more detail. - return_dict (`bool`, *optional*): - Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. -""" - - -# Copied from transformers.models.bart.modeling_flax_bart.FlaxBartAttention with Bart->OPT -class FlaxOPTAttention(nn.Module): - config: OPTConfig - embed_dim: int - num_heads: int - dropout: float = 0.0 - causal: bool = False - bias: bool = True - dtype: jnp.dtype = jnp.float32 # the dtype of the computation - - def setup(self) -> None: - self.head_dim = self.embed_dim // self.num_heads - if self.head_dim * self.num_heads != self.embed_dim: - raise ValueError( - f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}" - f" and `num_heads`: {self.num_heads})." - ) - - dense = partial( - nn.Dense, - self.embed_dim, - use_bias=self.bias, - dtype=self.dtype, - param_dtype=self.dtype, - kernel_init=jax.nn.initializers.normal(self.config.init_std, dtype=self.dtype), - ) - - self.q_proj, self.k_proj, self.v_proj = dense(), dense(), dense() - self.out_proj = dense() - - self.dropout_layer = nn.Dropout(rate=self.dropout) - - if self.causal: - self.causal_mask = make_causal_mask( - jnp.ones((1, self.config.max_position_embeddings), dtype="bool"), dtype="bool" - ) - - def _split_heads(self, hidden_states): - return hidden_states.reshape(hidden_states.shape[:2] + (self.num_heads, self.head_dim)) - - def _merge_heads(self, hidden_states): - return hidden_states.reshape(hidden_states.shape[:2] + (self.embed_dim,)) - - @nn.compact - def _concatenate_to_cache(self, key, value, query, attention_mask): - """ - This function takes projected key, value states from a single input token and concatenates the states to cached - states from previous steps. This function is slighly adapted from the official Flax repository: - https://github.com/google/flax/blob/491ce18759622506588784b4fca0e4bf05f8c8cd/flax/linen/attention.py#L252 - """ - # detect if we're initializing by absence of existing cache data. 
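# The cache variables below hold keys/values of shape (*batch_dims, max_length, num_heads, head_dim)
# together with a scalar cache_index counting how many positions have been written so far. Each
# decoding step writes its new key/value slice at cache_index via lax.dynamic_update_slice and
# narrows the attention mask so the current query only attends to already-generated positions.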
- is_initialized = self.has_variable("cache", "cached_key") - cached_key = self.variable("cache", "cached_key", jnp.zeros, key.shape, key.dtype) - cached_value = self.variable("cache", "cached_value", jnp.zeros, value.shape, value.dtype) - cache_index = self.variable("cache", "cache_index", lambda: jnp.array(0, dtype=jnp.int32)) - - if is_initialized: - *batch_dims, max_length, num_heads, depth_per_head = cached_key.value.shape - # update key, value caches with our new 1d spatial slices - cur_index = cache_index.value - indices = (0,) * len(batch_dims) + (cur_index, 0, 0) - key = lax.dynamic_update_slice(cached_key.value, key, indices) - value = lax.dynamic_update_slice(cached_value.value, value, indices) - cached_key.value = key - cached_value.value = value - num_updated_cache_vectors = query.shape[1] - cache_index.value = cache_index.value + num_updated_cache_vectors - # causal mask for cached decoder self-attention: our single query position should only attend to those key positions that have already been generated and cached, not the remaining zero elements. - pad_mask = jnp.broadcast_to( - jnp.arange(max_length) < cur_index + num_updated_cache_vectors, - tuple(batch_dims) + (1, num_updated_cache_vectors, max_length), - ) - attention_mask = combine_masks(pad_mask, attention_mask) - return key, value, attention_mask - - def __call__( - self, - hidden_states: jnp.ndarray, - key_value_states: Optional[jnp.ndarray] = None, - attention_mask: Optional[jnp.ndarray] = None, - init_cache: bool = False, - deterministic: bool = True, - ) -> Tuple[jnp.ndarray]: - """Input shape: Batch x Time x Channel""" - - # if key_value_states are provided this layer is used as a cross-attention layer - # for the decoder - is_cross_attention = key_value_states is not None - batch_size = hidden_states.shape[0] - - # get query proj - query_states = self.q_proj(hidden_states) - # get key, value proj - if is_cross_attention: - # cross_attentions - key_states = self.k_proj(key_value_states) - value_states = self.v_proj(key_value_states) - else: - # self_attention - key_states = self.k_proj(hidden_states) - value_states = self.v_proj(hidden_states) - - query_states = self._split_heads(query_states) - key_states = self._split_heads(key_states) - value_states = self._split_heads(value_states) - - # handle cache prepare causal attention mask - if self.causal: - query_length, key_length = query_states.shape[1], key_states.shape[1] - if self.has_variable("cache", "cached_key"): - mask_shift = self.variables["cache"]["cache_index"] - max_decoder_length = self.variables["cache"]["cached_key"].shape[1] - causal_mask = lax.dynamic_slice( - self.causal_mask, (0, 0, mask_shift, 0), (1, 1, query_length, max_decoder_length) - ) - else: - causal_mask = self.causal_mask[:, :, :query_length, :key_length] - causal_mask = jnp.broadcast_to(causal_mask, (batch_size,) + causal_mask.shape[1:]) - - # combine masks if needed - if attention_mask is not None and self.causal: - attention_mask = jnp.broadcast_to(jnp.expand_dims(attention_mask, axis=(-3, -2)), causal_mask.shape) - attention_mask = combine_masks(attention_mask, causal_mask) - elif self.causal: - attention_mask = causal_mask - elif attention_mask is not None: - attention_mask = jnp.expand_dims(attention_mask, axis=(-3, -2)) - - # During fast autoregressive decoding, we feed one position at a time, - # and cache the keys and values step by step. 
- if self.causal and (self.has_variable("cache", "cached_key") or init_cache): - key_states, value_states, attention_mask = self._concatenate_to_cache( - key_states, value_states, query_states, attention_mask - ) - - # Convert the boolean attention mask to an attention bias. - if attention_mask is not None: - # attention mask in the form of attention bias - attention_bias = lax.select( - attention_mask > 0, - jnp.full(attention_mask.shape, 0.0).astype(self.dtype), - jnp.full(attention_mask.shape, float("-inf")).astype(self.dtype), - ) - else: - attention_bias = None - - dropout_rng = None - if not deterministic and self.dropout > 0.0: - dropout_rng = self.make_rng("dropout") - - attn_weights = dot_product_attention_weights( - query_states, - key_states, - bias=attention_bias, - dropout_rng=dropout_rng, - dropout_rate=self.dropout, - broadcast_dropout=True, - deterministic=deterministic, - dtype=self.dtype, - precision=None, - ) - - attn_output = jnp.einsum("...hqk,...khd->...qhd", attn_weights, value_states) - attn_output = self._merge_heads(attn_output) - attn_output = self.out_proj(attn_output) - - return attn_output, attn_weights - - -class FlaxOPTDecoderLayer(nn.Module): - config: OPTConfig - dtype: jnp.dtype = jnp.float32 - - def setup(self) -> None: - self.embed_dim = self.config.hidden_size - self.self_attn = FlaxOPTAttention( - config=self.config, - embed_dim=self.embed_dim, - num_heads=self.config.num_attention_heads, - dropout=self.config.attention_dropout, - causal=True, - dtype=self.dtype, - ) - self.do_layer_norm_before = self.config.do_layer_norm_before - self.dropout_layer = nn.Dropout(rate=self.config.dropout) - self.activation_fn = ACT2FN[self.config.activation_function] - - self.self_attn_layer_norm = nn.LayerNorm( - dtype=self.dtype, - epsilon=1e-05, - param_dtype=self.dtype, - ) - self.fc1 = nn.Dense( - self.config.ffn_dim, - dtype=self.dtype, - param_dtype=self.dtype, - kernel_init=jax.nn.initializers.normal(self.config.init_std, dtype=self.dtype), - ) - self.fc2 = nn.Dense( - self.embed_dim, - dtype=self.dtype, - param_dtype=self.dtype, - kernel_init=jax.nn.initializers.normal(self.config.init_std, dtype=self.dtype), - ) - self.final_layer_norm = nn.LayerNorm(dtype=self.dtype, epsilon=1e-05, param_dtype=self.dtype) - - def __call__( - self, - hidden_states: jnp.ndarray, - attention_mask: jnp.ndarray, - init_cache: bool = False, - output_attentions: bool = True, - deterministic: bool = True, - ) -> Tuple[jnp.ndarray]: - residual = hidden_states - - # 125m, 1.7B, ..., 175B applies layer norm BEFORE attention - if self.do_layer_norm_before: - hidden_states = self.self_attn_layer_norm(hidden_states) - - # Self Attention - hidden_states, self_attn_weights = self.self_attn( - hidden_states=hidden_states, - attention_mask=attention_mask, - init_cache=init_cache, - deterministic=deterministic, - ) - hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) - hidden_states = residual + hidden_states - # 350m applies layer norm AFTER attention - if not self.do_layer_norm_before: - hidden_states = self.self_attn_layer_norm(hidden_states) - - # Fully Connected - hidden_states_shape = hidden_states.shape - hidden_states = hidden_states.reshape(-1, hidden_states.shape[-1]) - residual = hidden_states - - # 125m, 1.7B, ..., 175B applies layer norm BEFORE attention - if self.do_layer_norm_before: - hidden_states = self.final_layer_norm(hidden_states) - - hidden_states = self.fc1(hidden_states) - hidden_states = self.activation_fn(hidden_states) - - hidden_states = 
self.fc2(hidden_states) - hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) - - hidden_states = (residual + hidden_states).reshape(hidden_states_shape) - - # 350m applies layer norm AFTER attention - if not self.do_layer_norm_before: - hidden_states = self.final_layer_norm(hidden_states) - - outputs = (hidden_states,) - - if output_attentions: - outputs += (self_attn_weights,) - - return outputs - - -class FlaxOPTDecoderLayerCollection(nn.Module): - config: OPTConfig - dtype: jnp.dtype = jnp.float32 # the dtype of the computation - - def setup(self): - self.layers = [ - FlaxOPTDecoderLayer(self.config, name=str(i), dtype=self.dtype) - for i in range(self.config.num_hidden_layers) - ] - self.layerdrop = self.config.layerdrop - - def __call__( - self, - hidden_states, - attention_mask, - deterministic: bool = True, - init_cache: bool = False, - output_attentions: bool = False, - output_hidden_states: bool = False, - ): - # decoder layers - all_hidden_states = () if output_hidden_states else None - all_self_attns = () if output_attentions else None - - for decoder_layer in self.layers: - if output_hidden_states: - all_hidden_states += (hidden_states,) - - layer_outputs = decoder_layer( - hidden_states, - attention_mask=attention_mask, - init_cache=init_cache, - output_attentions=output_attentions, - deterministic=deterministic, - ) - - hidden_states = layer_outputs[0] - if output_attentions: - all_self_attns += (layer_outputs[1],) - - outputs = [hidden_states, all_hidden_states, all_self_attns] - return outputs - - -class FlaxOPTLearnedPositionalEmbedding(nn.Embed): - """ - This module learns positional embeddings up to a fixed maximum size. - """ - - def setup(self): - self.offset = 2 - self.embedding = self.param( - "embedding", self.embedding_init, (self.num_embeddings + self.offset, self.features), self.param_dtype - ) - - def __call__(self, positions): - """`input_ids_shape` is expected to be [bsz x seqlen].""" - - return super().__call__(positions + self.offset) - - -class FlaxOPTDecoder(nn.Module): - config: OPTConfig - dtype: jnp.dtype = jnp.float32 # the dtype of the computation - offset: int = 2 - - def setup(self): - self.dropout_layer = nn.Dropout(rate=self.config.dropout) - - embed_dim = self.config.hidden_size - self.padding_idx = self.config.pad_token_id - self.max_target_positions = self.config.max_position_embeddings - - self.embed_tokens = nn.Embed( - self.config.vocab_size, - self.config.word_embed_proj_dim, - embedding_init=jax.nn.initializers.normal(self.config.init_std), - dtype=self.dtype, - param_dtype=self.dtype, - ) - - self.embed_positions = FlaxOPTLearnedPositionalEmbedding( - self.config.max_position_embeddings, - embed_dim, - embedding_init=jax.nn.initializers.normal(self.config.init_std), - dtype=self.dtype, - param_dtype=self.dtype, - ) - - if self.config.word_embed_proj_dim != self.config.hidden_size: - self.project_in = nn.Dense(self.config.hidden_size, use_bias=False) - self.project_out = nn.Dense(self.config.word_embed_proj_dim, use_bias=False) - - else: - self.project_in = None - self.project_out = None - - # Note that the only purpose of `config._remove_final_layer_norm` is to keep backward compatibility - # with checkpoints that have been fine-tuned before transformers v4.20.1 - # see https://github.com/facebookresearch/metaseq/pull/164 - if self.config.do_layer_norm_before and not self.config._remove_final_layer_norm: - self.final_layer_norm = nn.LayerNorm( - dtype=self.dtype, - epsilon=1e-05, - param_dtype=self.dtype, - ) - 
else: - self.final_layer_norm = None - - self.layers = FlaxOPTDecoderLayerCollection(self.config, self.dtype) - - def __call__( - self, - input_ids, - attention_mask, - position_ids, - init_cache: bool = False, - output_attentions: bool = False, - output_hidden_states: bool = False, - return_dict: bool = True, - deterministic: bool = True, - ): - input_shape = input_ids.shape - input_ids = input_ids.reshape(-1, input_shape[-1]) - - inputs_embeds = self.embed_tokens(input_ids) - if self.project_in is not None: - inputs_embeds = self.project_in(inputs_embeds) - - positions = self.embed_positions(position_ids) - - hidden_states = inputs_embeds + positions - - hidden_states = self.dropout_layer(hidden_states, deterministic=deterministic) - - hidden_state, all_hidden_states, attentions = self.layers( - hidden_states, - attention_mask, - deterministic=deterministic, - init_cache=init_cache, - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - ) - - if self.final_layer_norm is not None: - hidden_state = self.final_layer_norm(hidden_state) - - if self.project_out is not None: - hidden_state = self.project_out(hidden_state) - - if output_hidden_states: - all_hidden_states += (hidden_state,) - - outputs = [hidden_state, all_hidden_states, attentions] - - if not return_dict: - return tuple(v for v in outputs if v is not None) - - return FlaxBaseModelOutput( - last_hidden_state=hidden_state, - hidden_states=all_hidden_states, - attentions=attentions, - ) - - -class FlaxOPTPreTrainedModel(FlaxPreTrainedModel): - config_class = OPTConfig - base_model_prefix: str = "model" - module_class: nn.Module = None - - def __init__( - self, - config: OPTConfig, - input_shape: Tuple[int] = (1, 1), - seed: int = 0, - dtype: jnp.dtype = jnp.float32, - _do_init: bool = True, - **kwargs, - ): - module = self.module_class(config=config, dtype=dtype, **kwargs) - super().__init__(config, module, input_shape=input_shape, seed=seed, dtype=dtype, _do_init=_do_init) - - def init_weights(self, rng: jax.random.PRNGKey, input_shape: Tuple, params: FrozenDict = None) -> FrozenDict: - # init input tensors - input_ids = jnp.zeros(input_shape, dtype="i4") - attention_mask = jnp.ones_like(input_ids) - - batch_size, sequence_length = input_ids.shape - position_ids = jnp.broadcast_to(jnp.arange(sequence_length)[None, :], (batch_size, sequence_length)) - - params_rng, dropout_rng = jax.random.split(rng) - rngs = {"params": params_rng, "dropout": dropout_rng} - - module_init_outputs = self.module.init( - rngs, - input_ids, - attention_mask, - position_ids, - return_dict=False, - ) - - random_params = module_init_outputs["params"] - if params is not None: - random_params = flatten_dict(unfreeze(random_params)) - params = flatten_dict(unfreeze(params)) - for missing_key in self._missing_keys: - params[missing_key] = random_params[missing_key] - self._missing_keys = set() - return freeze(unflatten_dict(params)) - else: - return random_params - - def init_cache(self, batch_size, max_length): - r""" - Args: - batch_size (`int`): - batch_size used for fast auto-regressive decoding. Defines the batch size of the initialized cache. - max_length (`int`): - maximum possible length for auto-regressive decoding. Defines the sequence length of the initialized - cache. 
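        Example (illustrative; `model`, `input_ids`, `attention_mask` and `position_ids` are assumed to be prepared by the caller):

            past_key_values = model.init_cache(batch_size=1, max_length=32)
            outputs = model(input_ids, attention_mask, position_ids, past_key_values=past_key_values)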
- """ - # init input variables to retrieve cache - input_ids = jnp.ones((batch_size, max_length), dtype="i4") - attention_mask = jnp.ones_like(input_ids, dtype="i4") - position_ids = jnp.broadcast_to(jnp.arange(jnp.atleast_2d(input_ids).shape[-1]), input_ids.shape) - - init_variables = self.module.init( - jax.random.PRNGKey(0), input_ids, attention_mask, position_ids, return_dict=False, init_cache=True - ) - return unfreeze(init_variables["cache"]) - - def __call__( - self, - input_ids: jnp.ndarray, - attention_mask: Optional[jnp.ndarray] = None, - position_ids: Optional[jnp.ndarray] = None, - params: dict = None, - past_key_values: dict = None, - output_attentions: Optional[bool] = None, - output_hidden_states: Optional[bool] = None, - return_dict: Optional[bool] = None, - dropout_rng: PRNGKey = None, - deterministic: bool = True, - ): - output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions - output_hidden_states = ( - output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states - ) - return_dict = return_dict if return_dict is not None else self.config.return_dict - - if attention_mask is None: - attention_mask = jnp.ones_like(input_ids) - - if position_ids is None: - position_ids = (attention_mask.cumsum(axis=1) * attention_mask) - 1 - - # Handle any PRNG if needed - rngs = {"dropout": dropout_rng} if dropout_rng is not None else {} - - inputs = {"params": params or self.params} - - # if past_key_values are passed then cache is already initialized a private flag init_cache has to be passed - # down to ensure cache is used. It has to be made sure that cache is marked as mutable so that it can be - # changed by FlaxOPTAttention module - if past_key_values is not None: - inputs["cache"] = past_key_values - mutable = ["cache"] - else: - mutable = False - - outputs = self.module.apply( - inputs, - input_ids=jnp.array(input_ids, dtype="i4"), - attention_mask=jnp.array(attention_mask, dtype="i4"), - position_ids=jnp.array(position_ids, dtype="i4"), - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - deterministic=deterministic, - rngs=rngs, - mutable=mutable, - ) - - # breakpoint() - # add updated cache to model output - if past_key_values is not None and return_dict: - outputs, past_key_values = outputs - outputs["past_key_values"] = unfreeze(past_key_values["cache"]) - return outputs - elif past_key_values is not None: - outputs, past_key_values = outputs - outputs = outputs[:1] + (unfreeze(past_key_values["cache"]),) + outputs[1:] - - return outputs - - -class FlaxOPTModule(nn.Module): - config: OPTConfig - dtype: jnp.dtype = jnp.float32 # the dtype of the computation - - def setup(self): - self.decoder = FlaxOPTDecoder(self.config, dtype=self.dtype) - - def _get_decoder_module(self): - return self.decoder - - def __call__( - self, - input_ids, - attention_mask, - position_ids, - output_attentions: bool = False, - output_hidden_states: bool = False, - return_dict: bool = True, - deterministic: bool = True, - init_cache=False, - ): - decoder_outputs = self.decoder( - input_ids=input_ids, - attention_mask=attention_mask, - position_ids=position_ids, - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - deterministic=deterministic, - init_cache=init_cache, - ) - - if not return_dict: - return decoder_outputs - - return FlaxBaseModelOutput( - 
last_hidden_state=decoder_outputs.last_hidden_state, - hidden_states=decoder_outputs.hidden_states, - attentions=decoder_outputs.attentions, - ) - - -# Copied from transformers.models.bart.modeling_flax_bart.FlaxBartModel with Bart->OPT -class FlaxOPTModel(FlaxOPTPreTrainedModel): - config: OPTConfig - dtype: jnp.dtype = jnp.float32 # the dtype of the computation - module_class = FlaxOPTModule - - -append_call_sample_docstring(FlaxOPTModel, _CHECKPOINT_FOR_DOC, FlaxBaseModelOutput, _CONFIG_FOR_DOC) - - -@add_start_docstrings( - "The bare OPT Model transformer outputting raw hidden-states without any specific head on top.", - OPT_START_DOCSTRING, -) -class FlaxOPTForCausalLMModule(nn.Module): - config: OPTConfig - dtype: jnp.dtype = jnp.float32 - - def setup(self): - self.model = FlaxOPTModule(config=self.config, dtype=self.dtype) - self.lm_head = nn.Dense( - self.config.vocab_size, - use_bias=False, - dtype=self.dtype, - kernel_init=jax.nn.initializers.normal(self.config.init_std), - ) - - def __call__( - self, - input_ids, - attention_mask, - position_ids, - init_cache: bool = False, - output_attentions: bool = False, - output_hidden_states: bool = False, - return_dict: bool = True, - deterministic: bool = True, - ): - outputs = self.model( - input_ids, - attention_mask, - position_ids, - init_cache=init_cache, - output_attentions=output_attentions, - output_hidden_states=output_hidden_states, - return_dict=return_dict, - deterministic=deterministic, - ) - - hidden_states = outputs[0] - - if self.config.tie_word_embeddings: - shared_embedding = self.model.variables["params"]["decoder"]["embed_tokens"]["embedding"] - lm_logits = self.lm_head.apply({"params": {"kernel": shared_embedding.T}}, hidden_states) - else: - lm_logits = self.lm_head(hidden_states) - - if not return_dict: - return (lm_logits,) + outputs[1:] - - return FlaxMaskedLMOutput( - logits=lm_logits, - hidden_states=outputs.hidden_states, - attentions=outputs.attentions, - ) - - -@add_start_docstrings( - """ - OPT Model with a language modeling head on top (linear layer with weights tied to the input embeddings) e.g for - autoregressive tasks. - """, - OPT_START_DOCSTRING, -) -class FlaxOPTForCausalLM(FlaxOPTPreTrainedModel): - module_class = FlaxOPTForCausalLMModule - - def prepare_inputs_for_generation(self, input_ids, max_length, attention_mask: Optional[jax.Array] = None): - # initializing the cache - batch_size, seq_length = input_ids.shape - - past_key_values = self.init_cache(batch_size, max_length) - # Note that usually one would have to put 0's in the attention_mask for x > input_ids.shape[-1] and x < cache_length. - # But since the decoder uses a causal mask, those positions are masked anyway. 
- # Thus, we can create a single static attention_mask here, which is more efficient for compilation - extended_attention_mask = jnp.ones((batch_size, max_length), dtype="i4") - - if attention_mask is not None: - position_ids = attention_mask.cumsum(axis=1) - 1 - extended_attention_mask = lax.dynamic_update_slice(extended_attention_mask, attention_mask, (0, 0)) - else: - position_ids = jnp.broadcast_to(jnp.arange(seq_length, dtype="i4")[None, :], (batch_size, seq_length)) - - return { - "past_key_values": past_key_values, - "attention_mask": extended_attention_mask, - "position_ids": position_ids, - } - - def update_inputs_for_generation(self, past_key_values, model_kwargs): - model_kwargs["past_key_values"] = past_key_values - model_kwargs["position_ids"] = model_kwargs["position_ids"][:, -1:] + 1 - return model_kwargs - - -append_call_sample_docstring(FlaxOPTForCausalLM, _CHECKPOINT_FOR_DOC, FlaxBaseModelOutput, _CONFIG_FOR_DOC) diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/opt_utils.py b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/opt_utils.py deleted file mode 100644 index 59ad9fcb23a7c618f66b7e32e71da5f415530210..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/opt_utils.py +++ /dev/null @@ -1,278 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
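For orientation, a minimal forward-pass sketch using the `FlaxOPTForCausalLM` class defined above (the checkpoint id and prompt are illustrative, and `from_pretrained` is used here with default initialization rather than the `_do_init=False` pattern used elsewhere in this example):

```python
# Illustrative usage of FlaxOPTForCausalLM from modeling_flax_opt.py:
# one forward pass, then greedily pick the most likely next token.
import jax.numpy as jnp
import numpy as np
from transformers import AutoTokenizer

from modeling_flax_opt import FlaxOPTForCausalLM

tokenizer = AutoTokenizer.from_pretrained("facebook/opt-350m")
model = FlaxOPTForCausalLM.from_pretrained("facebook/opt-350m", dtype=jnp.float32)

inputs = tokenizer("A sunny day in", return_tensors="np")
outputs = model(input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"])
next_token_id = int(np.argmax(outputs.logits[0, -1]))
print(tokenizer.decode([next_token_id]))
```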
- -import logging -import pathlib -from dataclasses import dataclass -from typing import Tuple - -# pytype: disable=import-error -import jax.lax as lax -import jax.numpy as jnp -import numpy as np -from flax.core.frozen_dict import freeze, unfreeze -from flax.traverse_util import flatten_dict, unflatten_dict -from jax import device_put -from jax.experimental.pjit import pjit -from jax.sharding import Mesh, PartitionSpec -from modeling_flax_opt import FlaxOPTForCausalLM -from transformers import AutoConfig, AutoTokenizer, FlaxLogitsProcessorList, FlaxMinLengthLogitsProcessor - -try: - from transformers.generation.flax_utils import GreedyState -except ImportError: - # as in transformers<=4.24.0 - from transformers.generation_flax_utils import GreedyState - -from transformers.models.opt import OPTConfig - -# pytype: enable=import-error - -MODEL_PARALLEL = "mp" - -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") -LOGGER = logging.getLogger("jax.opt_utils") -LOGGER.setLevel(level=logging.INFO) - - -@dataclass -class Config: - n_layers: int - n_heads: int - d_model: int - - -CONFIGS = { - "125M": Config(12, 12, 768), - "350M": Config(24, 16, 1024), - "1.3B": Config(24, 32, 2048), - "2.7B": Config(32, 32, 2560), - "5B": Config(24, 32, 128 * 32), - "6.7B": Config(32, 32, 4096), - "13B": Config(40, 40, 5120), - "20B": Config(44, 48, 128 * 48), - "30B": Config(48, 56, 7168), - "66B": Config(64, 72, 9216), - "89B": Config(48, 96, 128 * 96), - "175B": Config(96, 96, 12288), - "310B": Config(96, 128, 128 * 128), - "530B": Config(105, 128, 160 * 128), -} - -TP_RULES = { - ("model", "decoder", "embed_positions", "embedding"): PartitionSpec(None, None), - ("model", "decoder", "embed_tokens", "embedding"): PartitionSpec(None, None), - ("model", "decoder", "final_layer_norm", "bias"): PartitionSpec(None), - ("model", "decoder", "final_layer_norm", "scale"): PartitionSpec(None), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "fc1", "bias"): PartitionSpec(MODEL_PARALLEL), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "fc1", "kernel"): PartitionSpec(None, MODEL_PARALLEL), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "fc2", "bias"): PartitionSpec(None), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "fc2", "kernel"): PartitionSpec(MODEL_PARALLEL, None), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "final_layer_norm", "bias"): PartitionSpec(None), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "final_layer_norm", "scale"): PartitionSpec(None), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn", "k_proj", "bias"): PartitionSpec(MODEL_PARALLEL), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn", "k_proj", "kernel"): PartitionSpec( - None, MODEL_PARALLEL - ), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn", "q_proj", "bias"): PartitionSpec(MODEL_PARALLEL), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn", "q_proj", "kernel"): PartitionSpec( - None, MODEL_PARALLEL - ), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn", "v_proj", "bias"): PartitionSpec(MODEL_PARALLEL), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn", "v_proj", "kernel"): PartitionSpec( - None, MODEL_PARALLEL - ), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn", "out_proj", "bias"): PartitionSpec(None), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn", "out_proj", "kernel"): PartitionSpec( - MODEL_PARALLEL, None - ), - ("model", "decoder", 
"layers", "{{{LAYER_NUM}}}", "self_attn_layer_norm", "bias"): PartitionSpec(None), - ("model", "decoder", "layers", "{{{LAYER_NUM}}}", "self_attn_layer_norm", "scale"): PartitionSpec(None), -} - - -def get_params_spec(num_layers, params=None): - param_specs = {} - for key, spec in TP_RULES.items(): - if any("{{{LAYER_NUM}}}" in n for n in key): - for layer_num in range(num_layers): - param_specs[tuple(n.replace("{{{LAYER_NUM}}}", str(layer_num)) for n in key)] = spec - else: - param_specs[key] = spec - params_spec = freeze(unflatten_dict(param_specs)) - - if params is not None: - params_keys = set(flatten_dict(params).keys()) - params_spec_unfreeze = flatten_dict(unfreeze(params_spec)) - for key in tuple(params_spec_unfreeze.keys()): - if key not in params_keys: - del params_spec_unfreeze[key] - params_spec = freeze(unflatten_dict(params_spec_unfreeze)) - - return params_spec - - -def get_config(name: str): - if name.split("/")[0] == "random": - name = name.split("/")[-1] - config = CONFIGS[name] - else: - config = AutoConfig.from_pretrained(name) - - return config - - -def get_model(name: str, cache_dir: pathlib.Path) -> Tuple: - config = get_config(name) - if name == "facebook/opt-13b": - config._remove_final_layer_norm = True - if name.split("/")[0] == "random": - hf_config = OPTConfig( - hidden_size=config.d_model, - num_attention_heads=config.n_heads, - num_hidden_layers=config.n_layers, - ffn_dim=4 * config.d_model, - ) - model, params = FlaxOPTForCausalLM( - config=hf_config, - dtype=jnp.float16, - _do_init=False, - ) - else: - model, params = FlaxOPTForCausalLM.from_pretrained( - name, - config=config, - dtype=jnp.float16, - cache_dir=cache_dir.as_posix(), - _do_init=False, - ) - - return model, params - - -def get_tokenizer(name: str = "facebook/opt-30b"): - return AutoTokenizer.from_pretrained(name) - - -def greedy_search(model, params, input_ids, requested_len): - LOGGER.info("Compiling greedy search....") - pad_token_id = model.config.pad_token_id - eos_token_id = model.config.eos_token_id - - batch_size, cur_len = input_ids.shape - max_length = requested_len - - logits_processor = FlaxLogitsProcessorList() - logits_processor.append(FlaxMinLengthLogitsProcessor(model.config.min_length, model.config.eos_token_id)) - - eos_token_id = jnp.array(eos_token_id) - pad_token_id = jnp.array(pad_token_id) - cur_len = jnp.array(cur_len) - - # per batch-item holding current token in loop. - sequences = jnp.full((batch_size, max_length), pad_token_id, dtype=jnp.int32) - sequences = lax.dynamic_update_slice(sequences, input_ids, (0, 0)) - - # per batch-item state bit indicating if sentence has finished. - is_sent_finished = jnp.zeros((batch_size,), dtype=jnp.bool_) - - # For Seq2Seq generation, we only need to use the decoder instead of the whole model in generation loop - # and pass it the `encoder_outputs`, which are part of the `model_kwargs`. 
- # initialize model specific kwargs - model_kwargs = model.prepare_inputs_for_generation(input_ids, max_length) - - # initialize state - state = GreedyState( - cur_len=cur_len, - sequences=sequences, - running_token=input_ids, - is_sent_finished=is_sent_finished, - model_kwargs=model_kwargs, - ) - - def greedy_search_cond_fn(state): - """state termination condition fn.""" - has_reached_max_length = state.cur_len == max_length - all_sequence_finished = jnp.all(state.is_sent_finished) - finish_generation = jnp.logical_or(has_reached_max_length, all_sequence_finished) - return ~finish_generation - - def greedy_search_body_fn(state): - """state update fn.""" - logits, cache = model( - input_ids=state.running_token, - params=params, - past_key_values=state.model_kwargs["past_key_values"], - attention_mask=state.model_kwargs["attention_mask"], - position_ids=state.model_kwargs["position_ids"], - return_dict=False, - ) - logits = logits[:, -1] - - # apply min_length, ... - logits = logits_processor(state.sequences, logits, state.cur_len) - - next_token = jnp.argmax(logits, axis=-1) - - next_token = next_token * ~state.is_sent_finished + pad_token_id * state.is_sent_finished - next_is_sent_finished = state.is_sent_finished | (next_token == eos_token_id) - next_token = next_token[:, None] - - next_sequences = lax.dynamic_update_slice(state.sequences, next_token, (0, state.cur_len)) - next_model_kwargs = model.update_inputs_for_generation(cache, state.model_kwargs) - return GreedyState( - cur_len=state.cur_len + 1, - sequences=next_sequences, - running_token=next_token, - is_sent_finished=next_is_sent_finished, - model_kwargs=next_model_kwargs, - ) - - if input_ids.shape[1] > 1: - state = greedy_search_body_fn(state) - - state = lax.while_loop(greedy_search_cond_fn, greedy_search_body_fn, state) - - return state.sequences - - -def shard_params(model, init_params, params_spec, mesh_devices): - with Mesh(np.array(mesh_devices), (MODEL_PARALLEL,)): - if init_params is None: - params = pjit( - lambda: model.init_weights(model.key, model.input_shape), - in_shardings=None, - out_shardings=params_spec, - )() - else: - new_params = {} - init_params = flatten_dict(init_params) - params_spec = flatten_dict(params_spec) - - for key in init_params.keys(): - init_param = init_params[key] - init_param = device_put(init_param, mesh_devices[0]) - - new_params[key] = pjit( - lambda x: x, - in_shardings=None, - out_shardings=params_spec[key], - )(init_param) - params = freeze(unflatten_dict(new_params)) - params_spec = freeze(unflatten_dict(params_spec)) - num_params_b = np.sum([v.size for v in flatten_dict(params).values()]) / 10**9 - num_params = f"{num_params_b:.2f}B" if num_params_b > 1 else f"{num_params_b * 1000:.2f}M" - LOGGER.info(f"Number of params: {num_params}") - - return params diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/server.py b/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/server.py deleted file mode 100644 index 21ff969952fbaf536e77fb903192e2b346d4a0cb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_opt_multinode_jax/server.py +++ /dev/null @@ -1,263 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
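Putting the helpers from `opt_utils.py` together, the condensed single-process sketch below mirrors what `run()` in `server.py` does: build the model, derive tensor-parallel `PartitionSpec`s, shard the parameters over local devices, and run `greedy_search` under `pjit` inside a one-axis model-parallel mesh. The model name, cache path, and device count are illustrative.

```python
# Illustrative single-node flow mirroring run() in server.py (names and paths are assumptions).
import functools
import pathlib

import jax
import numpy as np
from jax.experimental.pjit import pjit
from jax.sharding import Mesh, PartitionSpec

from opt_utils import (
    MODEL_PARALLEL,
    get_model,
    get_params_spec,
    get_tokenizer,
    greedy_search,
    shard_params,
)

model, params = get_model("facebook/opt-125m", pathlib.Path("/tmp/opt-cache"))
params_spec = get_params_spec(model.config.num_hidden_layers, params)
devices = jax.local_devices()[:1]  # extend the slice to shard across more GPUs
params = shard_params(model, params, params_spec, devices)

infer = pjit(
    functools.partial(greedy_search, model),
    in_shardings=(params_spec, PartitionSpec(None, None)),
    out_shardings=None,
    static_argnums=2,
)

tokenizer = get_tokenizer()
input_ids = tokenizer(["this is test"], return_tensors="np")["input_ids"].astype(np.int64)
with Mesh(np.array(devices), (MODEL_PARALLEL,)):
    sequences = np.array(infer(params, input_ids, input_ids.shape[1] + 16))
print(tokenizer.batch_decode(sequences, skip_special_tokens=True))
```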
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import functools -import logging -import os -import pathlib -import pickle -import socket -import struct -import tempfile - -import filelock - -# pytype: disable=import-error -import jax -import numpy as np -from jax.experimental.pjit import pjit -from jax.sharding import Mesh, PartitionSpec -from opt_utils import MODEL_PARALLEL, get_model, get_params_spec, get_tokenizer, greedy_search, shard_params - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -# pytype: enable=import-error - - -TRITON_MODEL_NAME = "OPT" - -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") -LOGGER = logging.getLogger("jax.server") -LOGGER.setLevel(level=logging.INFO) - - -def run(model, params, number_of_gpus, max_batch_size, server_ip, port, number_of_nodes, rank): - params_spec = get_params_spec(model.config.num_hidden_layers, params) - - LOGGER.info(f"Available devices: {jax.local_devices()}.") - mesh_devices = jax.local_devices()[:number_of_gpus] - - LOGGER.info(f"Selected devices: {mesh_devices}.") - params = shard_params(model, params, params_spec, mesh_devices) - - LOGGER.info("Initialize model") - infer = pjit( - functools.partial(greedy_search, model), - in_shardings=(params_spec, PartitionSpec(None, None)), - out_shardings=None, - static_argnums=2, - ) - - def _server(): - LOGGER.info("Initialize tokenizer.") - tokenizer = get_tokenizer() - - LOGGER.info("Initialize socket for communication with worker.") - # open a socket to communicate with workers - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.bind((server_ip, port)) - s.listen() - - def wrapper(params, **inputs): - text, output_len = inputs.values() - text = np.char.decode(text.astype("bytes"), "utf-8") - text = text[:, 0] # squeeze 2nd axis - input_ids = tokenizer(text.tolist(), return_tensors="np")["input_ids"].astype(np.int64) - max_len = input_ids.shape[1] + output_len[0].item() - batch_size = input_ids.shape[0] - - conn_count = 0 - # wait until all the workers receive input data - while conn_count < number_of_nodes - 1: - LOGGER.debug("Broadcast to workers") - conn, _ = s.accept() - with conn: - data = pickle.dumps({"max_len": max_len, "batch_size": batch_size, "input_ids": input_ids}) - conn.sendall(struct.pack(">I", len(data))) - conn.sendall(data) - - conn_count += 1 - - LOGGER.debug("Collecting outputs") - with Mesh(np.array(mesh_devices), (MODEL_PARALLEL,)): - outputs = np.array(infer(params, input_ids, max_len)) - - LOGGER.debug(f"Result: {outputs}") - decoded = tokenizer.batch_decode(outputs, skip_special_tokens=True, clean_up_tokenization_spaces=False) - LOGGER.debug(f"Decoded result: {decoded}") - - res = [np.array([decoded])] - return res - - with Triton() as triton: - LOGGER.info("Loading OPT model.") - triton.bind( - model_name=TRITON_MODEL_NAME, - infer_func=batch(functools.partial(wrapper, params)), - inputs=[ - Tensor(name="input", dtype=np.bytes_, shape=(1,)), - Tensor(name="output_length", dtype=np.int64, shape=(1,)), - ], - outputs=[ - Tensor(name="output", 
dtype=np.bytes_, shape=(1,)), - ], - config=ModelConfig(max_batch_size=max_batch_size), - strict=True, - ) - # Serve model through Triton Inference Server - LOGGER.info("Serving inference") - triton.serve() - - def _worker(): - while True: - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - input_ids, max_len = None, None - # try to connect with the server until it send input data - while input_ids is None or max_len is None: - try: - s.connect((server_ip, port)) - data_size = struct.unpack(">I", s.recv(4))[0] - received_payload = b"" - reamining_payload_size = data_size - while reamining_payload_size != 0: - received_payload += s.recv(reamining_payload_size) - reamining_payload_size = data_size - len(received_payload) - data = pickle.loads(received_payload) - max_len, batch_size = data["max_len"], data["batch_size"] - input_ids = data["input_ids"].reshape((batch_size, -1)) - except ConnectionRefusedError: - pass - - LOGGER.debug(f"{input_ids}, {max_len}") - with Mesh(np.array(mesh_devices), (MODEL_PARALLEL,)): - infer(params, input_ids, max_len) - - if rank == 0: - LOGGER.info(f"Starting server at rank {rank}") - _server() - else: - LOGGER.info(f"Starting worker at rank {rank}") - _worker() - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument( - "--model-name", - type=str, - required=True, - help="Name of the HuggingFace model to serve.", - ) - parser.add_argument( - "--head-url", - type=str, - default="localhost:12345", - help="Server IP and port pair in form of : for head node.", - ) - parser.add_argument( - "--socket-port", - type=int, - default="65432", - help="Port for socket communication to push array for compute to all workers.", - ) - parser.add_argument( - "--number-of-nodes", - type=int, - default=1, - help="Number of nodes.", - ) - parser.add_argument( - "--rank", - type=int, - default=0, - help="Rank of current host - 0 mean the head node.", - ) - parser.add_argument( - "--number-of-gpus", - type=int, - default=1, - help="Number of gpus used for model.", - ) - parser.add_argument( - "--max-batch-size", - type=int, - default=256, - help="The maximal batch size used for model.", - ) - parser.add_argument( - "--cache-dir", - type=str, - default=None, - help="Location of cache to avoid download model for multiple nodes.", - ) - parser.add_argument( - "--verbose", - action="store_true", - help="Enable verbose logging.", - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - LOGGER.setLevel(log_level) - - os.environ["XLA_PYTHON_CLIENT_PREALLOCATE"] = "false" - os.environ["NCCL_LAUNCH_MODE"] = "PARALLEL" - - head_url = args.head_url - number_of_nodes = args.number_of_nodes - rank = args.rank - - LOGGER.info(f"Head url: {head_url}") - LOGGER.info(f"Number of nodes: {number_of_nodes}") - LOGGER.info(f"Host rank: {rank}") - - jax.distributed.initialize(head_url, number_of_nodes, rank) - LOGGER.info(f"{jax.devices()=}") - LOGGER.info(f"{jax.local_devices()=}") - - with tempfile.TemporaryDirectory() as tempdir: - cache_dir = args.cache_dir - if not cache_dir: - cache_dir = tempdir - - cache_dir = pathlib.Path(cache_dir) - LOGGER.info(f"Cache location: {cache_dir}") - - lock_file = cache_dir / "lock" / "jax_opt.lock" - lock_file.parent.mkdir(parents=True, exist_ok=True) - - lock = filelock.FileLock(lock_file.as_posix()) - LOGGER.info(f"Lock in {lock_file}") - with lock: - model, params = get_model(args.model_name, cache_dir) - - server_ip, port = args.head_url.split(":") - - run( - model=model, - 
params=params, - max_batch_size=args.max_batch_size, - number_of_gpus=args.number_of_gpus, - server_ip=server_ip, - port=int(args.socket_port), - number_of_nodes=number_of_nodes, - rank=rank, - ) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/README.md b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/README.md deleted file mode 100644 index 16c1f1a7a7f92a9b46150fae10bf1f5e5d5f3a9a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/README.md +++ /dev/null @@ -1,139 +0,0 @@ - - -# HuggingFace ResNet50 PyTorch Model - -## Overview - -The example presents a HuggingFace ResNet50 PyTorch model inference. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies for downloading model from HuggingFace -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -And configurations: - -- `kubernetes` - example Helm Charts for serving and test inference in Kubernetes cluster - -## Running example locally - -To run example locally the `torch` package is required. It can be installed in your current environment using pip: - -```shell -pip install torch -``` - -Or you can use NVIDIA PyTorch container: - -```shell -docker run -it --gpus 1 --shm-size 8gb -v {repository_path}:{repository_path} -w {repository_path} nvcr.io/nvidia/pytorch:23.10-py3 bash -``` - -If you select to use container we recommend to install -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -Follow the step-by-step guide to execute the example: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` - -## Running example on Kubernetes cluster - -The following prerequisites must be matched to run the example: - -- Kubernetes cluster with NVIDIA GPU node -- [NVIDIA Device Plugin](https://github.com/NVIDIA/k8s-device-plugin) installed in Kubernetes cluster -- Docker Containers Registry accessible from Kubernetes cluster -- [Installed Helm](https://helm.sh/docs/intro/install/) for creating the deployment and test job - -Optionally you may install NVIDIA Container Toolkit and NVIDIA GPU Operator which enable more features -like [MIG](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-operator-mig.html) or -[Time Slicing](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-sharing.html) support in the cluster. -To learn more how to set up Kubernetes cluster with NVIDIA GPU you can review [ -NVIDIA Cloud Native Documentation](https://docs.nvidia.com/datacenter/cloud-native/contents.html) - -Below, we present a step-by-step guide assuming that **all the commands are executed from the root of repository**. - -Follow these steps to run and test example in the cluster: -1. [Optional] Build PyTriton wheel following the [build instruction](../../docs/building.md) -2. Prepare the tag under which image is going to be pushed to your Docker Containers Registry accessible from Kubernetes -cluster. 
Example for local cluster (minikube, k3s) with registry hosted inside the cluster: -```shell -export DOCKER_IMAGE_NAME_WITH_TAG=localhost:5000/resnet-pytorch-example:latest -``` -3. Build and push the Docker container image to your registry: - -```shell -# Export the base image used for build -export FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3 -./examples/huggingface_resnet_pytorch/kubernetes/build_and_push.sh -``` - -**Note**: By default the container is built using `pytriton` package from `GitHub`. To build container with wheel built -locally use `export BUILD_FROM=dist` before executing script. - -4. Install the Helm Chart with deployment and service: - -```shell -helm upgrade -i --set deployment.image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -resnet-pytorch-example \ -./examples/huggingface_resnet_pytorch/kubernetes/deployment -``` - -5. Install the Helm Chart with client test - -```shell -helm install --set image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -resnet-pytorch-example-test \ -./examples/huggingface_resnet_pytorch/kubernetes/test -``` - -Now, you can review the logs from the running PODs to verify the inference is running. To show the logs from cluster -for given POD first list all running pods: -```shell -kubectl get pods -``` - -Next show logs from server or client: -```shell -kubectl logs {NAME} -``` - -To remove the installed charts simply run: -```shell -helm uninstall resnet-pytorch-example-test -helm uninstall resnet-pytorch-example -``` \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/client.py b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/client.py deleted file mode 100644 index d8217dca2740ff3e5cab70863d7623c60ca50dfa..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/client.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for ResNet50 classifier sample server.""" -import argparse -import io -import logging -from concurrent.futures import ThreadPoolExecutor - -import numpy as np -from datasets import load_dataset # pytype: disable=import-error - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.huggingface_bart_pytorch.client") - - -def infer_model(thread_idx, args, dataset): - with ModelClient(args.url, "ResNet", init_timeout_s=args.init_timeout_s) as client: - image = dataset["image"][0] - logger.info(f"Image: {image}") - - output = io.BytesIO() - image.save(output, format="JPEG") - image = np.frombuffer(output.getbuffer(), dtype=np.uint8) - - logger.info(f"Running inference requests in thread {thread_idx}.") - - for req_idx in range(1, args.iterations + 1): - logger.debug(f"Sending request ({req_idx}) in thread {thread_idx}.") - result_data = client.infer_sample(image) - logger.debug(f"Result: {result_data} for request ({req_idx}) in thread {thread_idx}.") - - logger.info(f"Last result: {result_data} for request ({req_idx}) in thread {thread_idx}.") - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--url", - default="localhost", - help=( - "Url to Triton server (ex. grpc://localhost:8001)." - "HTTP protocol with default port is used if parameter is not provided" - ), - required=False, - ) - parser.add_argument( - "--init-timeout-s", - type=float, - default=600.0, - help="Server and model ready state timeout in seconds", - required=False, - ) - parser.add_argument( - "--concurrency", - type=int, - default=32, - help="Number of concurrent requests.", - required=False, - ) - parser.add_argument( - "--iterations", - type=int, - default=1, - help="Number of requests per client.", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - dataset = load_dataset("huggingface/cats-image", split="test") - with ThreadPoolExecutor(max_workers=args.concurrency) as executor: - running_tasks = [ - executor.submit(infer_task, idx, args, dataset) - for idx, infer_task in enumerate([infer_model] * args.concurrency) - ] - for running_task in running_tasks: - running_task.result() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/install.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/install.sh deleted file mode 100644 index 33762fa37795b855ce268c816c959dc51aa74fc4..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -pip install transformers datasets diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/Dockerfile b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/Dockerfile deleted file mode 100644 index f813eb26c159f66e295ca1a7f1d0b6230f4eb9a4..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/Dockerfile +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -ARG FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3 -ARG BUILD_FROM - -FROM ${FROM_IMAGE_NAME} as base -WORKDIR /opt/app - -# Use when build PyTriton from source -FROM base as install-from-dist -COPY dist/*.whl /opt/app -RUN pip install /opt/app/*.whl - -# Install from pypi -FROM base as install-from-pypi -RUN pip install -U nvidia-pytriton - -FROM install-from-${BUILD_FROM} AS image - -ENV PYTHONUNBUFFERED=1 - -WORKDIR /opt/app - -COPY examples/huggingface_resnet_pytorch/install.sh /opt/app -RUN /opt/app/install.sh - -COPY examples/huggingface_resnet_pytorch/client.py /opt/app -COPY examples/huggingface_resnet_pytorch/server.py /opt/app - -ENTRYPOINT [] \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/build_and_push.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/build_and_push.sh deleted file mode 100644 index 39de66a0ccc2f442091d35a09821789941745929..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/build_and_push.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-if [ -z ${DOCKER_IMAGE_NAME_WITH_TAG} ]; then - echo "Provide Docker image name under to push the created image to your registry" - echo "Example:" - echo " export DOCKER_IMAGE_NAME_WITH_TAG=my-registry:5000/resnet-pytorch-example:latest" - exit 1 -fi - -if [ -z ${FROM_IMAGE_NAME} ]; then - echo "Provide Docker image that would be used as base image" - echo "Example:" - echo " export FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3" - exit 1 -fi - -BUILD_FROM="${BUILD_FROM:-pypi}" -if [[ ${BUILD_FROM} != "pypi" ]] && [[ ${BUILD_FROM} != "dist" ]]; then - echo "The BUILD_FROM variable should be equal to 'pypi' or 'dist'" - echo "Example:" - echo " export BUILD_FROM=dist" - exit 1 -fi - -set -xe - -DOCKER_BUILDKIT=1 docker build -f examples/huggingface_resnet_pytorch/kubernetes/Dockerfile \ - -t ${DOCKER_IMAGE_NAME_WITH_TAG} \ - --build-arg FROM_IMAGE_NAME=${FROM_IMAGE_NAME} \ - --build-arg BUILD_FROM=${BUILD_FROM} . -docker push ${DOCKER_IMAGE_NAME_WITH_TAG} \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/Chart.yaml deleted file mode 100644 index 0abc3354499a7056a4d775f2b6abe584ea0d7d1a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: ResNet PyTriton Example -name: resnet-pytorch-example -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. 
-*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/deployment.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/deployment.yaml deleted file mode 100644 index 3810487ffc48a7b6bf19e9ad9ac7a73c9b99fda9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/deployment.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ .Release.Name }}-deployment - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - replicas: {{ .Values.replicaCount }} - selector: - matchLabels: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} - template: - metadata: - labels: - app: {{ template "selector.name" . 
}} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - restartPolicy: {{ .Values.restartPolicy }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.deployment.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c", "/opt/app/server.py --verbose"] - ports: - - containerPort: 8000 - name: http - - containerPort: 8001 - name: grpc - - containerPort: 8002 - name: metrics - livenessProbe: - initialDelaySeconds: 60 - periodSeconds: 10 - httpGet: - path: /v2/health/live - port: http - readinessProbe: - initialDelaySeconds: 60 - periodSeconds: 10 - httpGet: - path: /v2/health/ready - port: http - resources: - requests: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - limits: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - volumeMounts: - - name: shared-memory - mountPath: /dev/shm - volumes: - - name: shared-memory - emptyDir: - medium: Memory diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/service.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/service.yaml deleted file mode 100644 index 79d7c7f62cd8d714edbd6aec8721d7691007d7c8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/service.yaml +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . }}-service - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - type: {{ .Values.service.type }} - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics - selector: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/values.yaml deleted file mode 100644 index 6c9106bbcc5ce27fa872fa3e4d7e4529872eaff9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/deployment/values.yaml +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: Always -replicaCount: 1 -deployment: - image: null - numOfGPUs: 1 -service: - type: ClusterIP diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/Chart.yaml deleted file mode 100644 index 7a7869b3a245ad898fff3544d49708302a6c84fc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: ResNet PyTriton Example Test -name: resnet-pytorch-example-test -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. 
-*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/templates/job.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/templates/job.yaml deleted file mode 100644 index 8c4fb263bc6f09a54e3ff1151df6c0635c0003f0..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/templates/job.yaml +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: batch/v1 -kind: Job -metadata: - name: {{ .Release.Name }}-deployment - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - template: - metadata: - labels: - app: {{ template "selector.name" . }} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c", "/opt/app/client.py --url ${SERVICE_URL} --iterations 100 --verbose"] - env: - - name: SERVICE_URL - value: {{ .Values.serviceUrl }} - restartPolicy: {{ .Values.restartPolicy }} - backoffLimit: {{ .Values.backoffLimit }} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/values.yaml deleted file mode 100644 index a9b0625fe5881b256418513338dee5d7fec753ad..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/kubernetes/test/values.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: OnFailure -backoffLimit: 4 -image: null -serviceUrl: "http://resnet-pytorch-example-service:8000" \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/server.py b/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/server.py deleted file mode 100644 index ef43ec190961cfe1c95dedc2161ba68918f46e79..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_resnet_pytorch/server.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Simple classifier example based on Hugging Face Pytorch ResNet model.""" -import argparse -import io -import logging - -import numpy as np -import torch # pytype: disable=import-error -from PIL import Image # pytype: disable=import-error -from transformers import AutoFeatureExtractor, ResNetForImageClassification # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import DynamicBatcher, ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.huggingface_bart_pytorch.server") - -feature_extractor = AutoFeatureExtractor.from_pretrained("microsoft/resnet-50") -model = ResNetForImageClassification.from_pretrained("microsoft/resnet-50") - -DEVICE = "cuda" if torch.cuda.is_available() else "cpu" -model = model.to(DEVICE) - - -@batch -def _infer_fn(image: np.ndarray): - logger.debug(f"Image data: {image.shape} ({image.size})") - images = [] - for img in image: - img = Image.open(io.BytesIO(img.tobytes())) - images.append(img) - - inputs = feature_extractor(images, return_tensors="pt") - for name, value in inputs.items(): - inputs[name] = value.to(DEVICE) - - with torch.no_grad(): - logits = model(**inputs).logits - logits = logits.to("cpu") - - labels = [] - for logit in logits: - predicted_label = logit.argmax(-1).item() - label = np.char.encode(model.config.id2label[predicted_label], "utf-8") - labels.append([label]) - - return {"label": np.array(labels)} - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--max-batch-size", - type=int, - default=32, - help="Batch size of request.", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - 
%(levelname)s - %(name)s: %(message)s") - - with Triton() as triton: - logger.info("Loading ResNet model.") - triton.bind( - model_name="ResNet", - infer_func=_infer_fn, - inputs=[ - Tensor(name="image", dtype=np.uint8, shape=(-1,)), - ], - outputs=[ - Tensor(name="label", dtype=bytes, shape=(1,)), - ], - config=ModelConfig( - max_batch_size=args.max_batch_size, - batcher=DynamicBatcher(max_queue_delay_microseconds=5000), # 5ms - ), - strict=True, - ) - logger.info("Serving inference") - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/README.md b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/README.md deleted file mode 100644 index f840a13b12f8edf5090de25961360609c1ac029e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/README.md +++ /dev/null @@ -1,140 +0,0 @@ - - -# HuggingFace Stable Diffusion 1.5 model - -## Overview - -The example presents running HuggingFace Stable Diffusion 1.5 on PyTriton. - -Example consists of following scripts: - -- `install.sh` - install additional packages and libraries required to run the example -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -And configurations: - -- `kubernetes` - example Helm Charts for serving and test inference in Kubernetes cluster - -## Running example locally - -To run example locally the `torch` package. It can be installed in your current environment using pip: - -```shell -pip install torch -``` - -Or you can use NVIDIA PyTorch container: - -```shell -docker run -it --gpus 1 --shm-size 8gb -v {repository_path}:{repository_path} -w {repository_path} nvcr.io/nvidia/pytorch:23.10-py3 bash -``` - -If you select to use container we recommend to install -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -The step-by-step guide: - -1. Install PyTriton following - the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.sh` to perform queries on model: - -```shell -./client.sh -``` - -## Running example on Kubernetes cluster - -The following prerequisites must be matched to run the example: - -- Kubernetes cluster with NVIDIA GPU node -- [NVIDIA Device Plugin](https://github.com/NVIDIA/k8s-device-plugin) installed in Kubernetes cluster -- Docker Containers Registry accessible from Kubernetes cluster -- [Installed Helm](https://helm.sh/docs/intro/install/) for creating the deployment and test job - -Optionally you may install NVIDIA Container Toolkit and NVIDIA GPU Operator which enable more features -like [MIG](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-operator-mig.html) or -[Time Slicing](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-sharing.html) support in the cluster. -To learn more how to set up Kubernetes cluster with NVIDIA GPU you can review [ -NVIDIA Cloud Native Documentation](https://docs.nvidia.com/datacenter/cloud-native/contents.html) - -Below, we present a step-by-step guide assuming that **all the commands are executed from the root of repository**. 
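For reference, the request the test client sends to the deployed model boils down to the following sketch (condensed from `client.py` in this example; the endpoint, timeout, and output file name are illustrative):

```python
# Condensed sketch of a single Stable Diffusion request; endpoint and file name are illustrative.
import base64

import numpy as np

from pytriton.client import ModelClient

with ModelClient("localhost", "StableDiffusion_1_5", init_timeout_s=600.0) as client:
    prompt = np.char.encode(np.array([["A photo of an astronaut riding a horse on mars"]]), "utf-8")
    img_size = np.array([[512]], dtype=np.int64)
    result = client.infer_batch(prompt=prompt, img_size=img_size)

# The server returns base64-encoded JPEG bytes in the "image" output.
with open("image.jpeg", "wb") as fp:
    fp.write(base64.b64decode(result["image"][0][0]))
```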
- -Follow these steps to run and test example in the cluster: -1. [Optional] Build PyTriton wheel following the [build instruction](../../docs/building.md) -2. Prepare the tag under which image is going to be pushed to your Docker Containers Registry accessible from Kubernetes -cluster. Example for local cluster (minikube, k3s) with registry hosted inside the cluster: -```shell -export DOCKER_IMAGE_NAME_WITH_TAG=localhost:5000/stable-diffusion-example:latest -``` -3. Build and push the Docker container image to your registry: - -```shell -# Export the base image used for build -export FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3 -./examples/huggingface_stable_diffusion/kubernetes/build_and_push.sh -``` - -**Note**: By default the container is built using `pytriton` package from `GitHub`. To build container with wheel built -locally use `export BUILD_FROM=dist` before executing script. - -4. Install the Helm Chart with deployment and service: - -```shell -helm upgrade -i --set deployment.image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -stable-diffusion-example \ -./examples/huggingface_stable_diffusion/kubernetes/deployment -``` - -5. Install the Helm Chart with client test - -```shell -helm install --set image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -stable-diffusion-example-test \ -./examples/huggingface_stable_diffusion/kubernetes/test -``` - -Now, you can review the logs from the running PODs to verify the inference is running. To show the logs from cluster -for given POD first list all running pods: -```shell -kubectl get pods -``` - -Next show logs from server or client: -```shell -kubectl logs {NAME} -``` - -To remove the installed charts simply run: -```shell -helm uninstall stable-diffusion-example-test -helm uninstall stable-diffusion-example -``` \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/__init__.py b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/client.py b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/client.py deleted file mode 100644 index b183e0f2105c4e4bac2ae094e80f7efce391900e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/client.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for Stable Diffusion 1.5.""" -import argparse -import base64 -import io -import logging -import pathlib - -import numpy as np -from PIL import Image # pytype: disable=import-error - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.huggingface_stable_diffusion.client") - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--url", - default="localhost", - help=( - "Url to Triton server (ex. grpc://localhost:8001)." - "HTTP protocol with default port is used if parameter is not provided" - ), - required=False, - ) - parser.add_argument( - "--init-timeout-s", - type=float, - default=600.0, - help="Server and model ready state timeout in seconds", - required=False, - ) - parser.add_argument( - "--iterations", - type=int, - default=1, - help="Number of requests per client.", - required=False, - ) - parser.add_argument( - "--results-path", - type=str, - default="results", - help="Path to folder where images should be stored.", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - prompts = [ - "A photo of an astronaut riding a horse on mars", - "An image of a squirrel in Picasso style", - "A running dog in the fields of trees in Manga style", - ] - - img_size = np.array([[512]]) - results_path = pathlib.Path(args.results_path) - results_path.mkdir(parents=True, exist_ok=True) - - with ModelClient(args.url, "StableDiffusion_1_5", init_timeout_s=args.init_timeout_s) as client: - for req_idx in range(1, args.iterations + 1): - logger.debug(f"Sending request ({req_idx}).") - prompt_id = req_idx % len(prompts) - prompt = prompts[prompt_id] - prompt = np.array([[prompt]]) - prompt = np.char.encode(prompt, "utf-8") - logger.info(f"Prompt ({req_idx}): {prompt}") - logger.info(f"Image size ({req_idx}): {img_size}") - result_dict = client.infer_batch(prompt=prompt, img_size=img_size) - logger.debug(f"Result for for request ({req_idx}).") - - for idx, image in enumerate(result_dict["image"]): - file_idx = req_idx + idx - file_path = results_path / str(file_idx) / "image.jpeg" - file_path.parent.mkdir(parents=True, exist_ok=True) - msg = base64.b64decode(image[0]) - buffer = io.BytesIO(msg) - image = Image.open(buffer) - with file_path.open("wb") as fp: - image.save(fp) - logger.info(f"Image saved to {file_path}") - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/install.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/install.sh deleted file mode 100644 index 55b04ad0b56cfde538645ada3726955a48ddf7db..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -pip install transformers accelerate diffusers Pillow filelock \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/Dockerfile b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/Dockerfile deleted file mode 100644 index a0cac6ef721fe3ea8a9b0b9882999e3c91034a39..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/Dockerfile +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -ARG FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3 -ARG BUILD_FROM - -FROM ${FROM_IMAGE_NAME} as base -WORKDIR /opt/app - -# Use when build PyTriton from source -FROM base as install-from-dist -COPY dist/*.whl /opt/app -RUN pip install /opt/app/*.whl - -# Install from pypi -FROM base as install-from-pypi -RUN pip install -U nvidia-pytriton - -FROM install-from-${BUILD_FROM} AS image - -ENV PYTHONUNBUFFERED=1 - -WORKDIR /opt/app - -COPY examples/huggingface_stable_diffusion/install.sh /opt/app -RUN /opt/app/install.sh - -COPY examples/huggingface_stable_diffusion/client.py /opt/app -COPY examples/huggingface_stable_diffusion/server.py /opt/app - -ENTRYPOINT [] \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/build_and_push.sh b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/build_and_push.sh deleted file mode 100644 index a7711154ca3f39236e442c4083a6e255d08cea8e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/build_and_push.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-if [ -z ${DOCKER_IMAGE_NAME_WITH_TAG} ]; then - echo "Provide Docker image name under to push the created image to your registry" - echo "Example:" - echo " export DOCKER_IMAGE_NAME_WITH_TAG=my-registry:5000/stable-diffusion-example:latest" - exit 1 -fi - -if [ -z ${FROM_IMAGE_NAME} ]; then - echo "Provide Docker image that would be used as base image" - echo "Example:" - echo " export FROM_IMAGE_NAME=nvcr.io/nvidia/pytorch:23.10-py3" - exit 1 -fi - -BUILD_FROM="${BUILD_FROM:-pypi}" -if [[ ${BUILD_FROM} != "pypi" ]] && [[ ${BUILD_FROM} != "dist" ]]; then - echo "The BUILD_FROM variable should be equal to 'pypi' or 'dist'" - echo "Example:" - echo " export BUILD_FROM=dist" - exit 1 -fi - -set -xe - -DOCKER_BUILDKIT=1 docker build -f examples/huggingface_stable_diffusion/kubernetes/Dockerfile \ - -t ${DOCKER_IMAGE_NAME_WITH_TAG} \ - --build-arg FROM_IMAGE_NAME=${FROM_IMAGE_NAME} \ - --build-arg BUILD_FROM=${BUILD_FROM} . -docker push ${DOCKER_IMAGE_NAME_WITH_TAG} \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/Chart.yaml deleted file mode 100644 index ba30d685d156708c715324014eb9bf15caa87737..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: Stable Diffusion 1.5 PyTriton Example -name: stable-diffusion-example -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. 
-*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/deployment.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/deployment.yaml deleted file mode 100644 index 734769096323553d9d0ad90bbf7810f2f8648e18..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/deployment.yaml +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ .Release.Name }}-deployment - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - replicas: {{ .Values.replicaCount }} - selector: - matchLabels: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} - template: - metadata: - labels: - app: {{ template "selector.name" . 
}} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - restartPolicy: {{ .Values.restartPolicy }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.deployment.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c", "/opt/app/server.py --verbose"] - ports: - - containerPort: 8000 - name: http - - containerPort: 8001 - name: grpc - - containerPort: 8002 - name: metrics - livenessProbe: - initialDelaySeconds: 180 - periodSeconds: 10 - httpGet: - path: /v2/health/live - port: http - readinessProbe: - initialDelaySeconds: 180 - periodSeconds: 10 - httpGet: - path: /v2/health/ready - port: http - resources: - requests: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - limits: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - volumeMounts: - - name: shared-memory - mountPath: /dev/shm - volumes: - - name: shared-memory - emptyDir: - medium: Memory diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/service.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/service.yaml deleted file mode 100644 index 79d7c7f62cd8d714edbd6aec8721d7691007d7c8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/templates/service.yaml +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . }}-service - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - type: {{ .Values.service.type }} - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics - selector: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/values.yaml deleted file mode 100644 index 6c9106bbcc5ce27fa872fa3e4d7e4529872eaff9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/deployment/values.yaml +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: Always -replicaCount: 1 -deployment: - image: null - numOfGPUs: 1 -service: - type: ClusterIP diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/Chart.yaml deleted file mode 100644 index dbc11193302e309bead0e7abc53744c6f0c9c95c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: Stable Diffusion PyTriton Example Test -name: stable-diffusion-example-test -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. 
-*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/templates/job.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/templates/job.yaml deleted file mode 100644 index 8c4fb263bc6f09a54e3ff1151df6c0635c0003f0..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/templates/job.yaml +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: batch/v1 -kind: Job -metadata: - name: {{ .Release.Name }}-deployment - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - template: - metadata: - labels: - app: {{ template "selector.name" . }} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c", "/opt/app/client.py --url ${SERVICE_URL} --iterations 100 --verbose"] - env: - - name: SERVICE_URL - value: {{ .Values.serviceUrl }} - restartPolicy: {{ .Values.restartPolicy }} - backoffLimit: {{ .Values.backoffLimit }} diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/values.yaml b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/values.yaml deleted file mode 100644 index 43af1775aa7e13436a096c2220c9f6c6c4483785..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/kubernetes/test/values.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: OnFailure -backoffLimit: 4 -image: null -serviceUrl: "http://stable-diffusion-example-service:8000" \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/server.py b/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/server.py deleted file mode 100644 index 07ddea109ee49e8919dca1285a7bf5d8357e2e08..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/huggingface_stable_diffusion/server.py +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Server for Stable Diffusion 1.5.""" -import argparse -import base64 -import io -import logging - -import numpy as np -import torch # pytype: disable=import-error -from diffusers import StableDiffusionPipeline # pytype: disable=import-error - -from pytriton.decorators import batch, first_value, group_by_values -from pytriton.model_config import DynamicBatcher, ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -LOGGER = logging.getLogger("examples.huggingface_stable_diffusion.server") - -DEVICE = "cuda" if torch.cuda.is_available() else "cpu" -IMAGE_FORMAT = "JPEG" - -pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16) -pipe = pipe.to(DEVICE) - - -def _encode_image_to_base64(image): - raw_bytes = io.BytesIO() - image.save(raw_bytes, IMAGE_FORMAT) - raw_bytes.seek(0) # return to the start of the buffer - return base64.b64encode(raw_bytes.read()) - - -@batch -@group_by_values("img_size") -@first_value("img_size") -def _infer_fn( - prompt: np.ndarray, - img_size: np.int64, -): - prompts = [np.char.decode(p.astype("bytes"), "utf-8").item() for p in prompt] - LOGGER.debug(f"Prompts: {prompts}") - LOGGER.debug(f"Image Size: {img_size}x{img_size}") - - outputs = [] - for idx, image in enumerate( - pipe( - prompt=prompts, - height=img_size, - width=img_size, - ).images - ): - raw_data = _encode_image_to_base64(image) - outputs.append([raw_data]) - LOGGER.debug(f"Generated result for prompt `{prompts[idx]}` with size {len(raw_data)}") - - LOGGER.debug(f"Prepared batch response of size: {len(outputs)}") - return {"image": np.array(outputs)} - - -def _parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument( - "--verbose", - "-v", - action="store_true", - help="Enable verbose logging in debug mode.", - ) - return parser.parse_args() - - -def main(): - """Initialize server with 
model.""" - args = _parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - log_verbose = 1 if args.verbose else 0 - config = TritonConfig(exit_on_error=True, log_verbose=log_verbose) - - with Triton(config=config) as triton: - LOGGER.info("Loading the pipeline") - triton.bind( - model_name="StableDiffusion_1_5", - infer_func=_infer_fn, - inputs=[ - Tensor(name="prompt", dtype=np.bytes_, shape=(1,)), - Tensor(name="img_size", dtype=np.int64, shape=(1,)), - ], - outputs=[ - Tensor(name="image", dtype=np.bytes_, shape=(1,)), - ], - config=ModelConfig( - max_batch_size=4, - batcher=DynamicBatcher( - max_queue_delay_microseconds=100, - ), - ), - strict=True, - ) - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/identity_python/README.md b/stf/stf-api-alternative/pytriton/examples/identity_python/README.md deleted file mode 100644 index cc298d7c42d89f47cdbab029d30d362d08d964da..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/identity_python/README.md +++ /dev/null @@ -1,53 +0,0 @@ - - -# Identity Python Model - -## Overview - -The example presents a simple Identity model which pass the inputs to the outputs. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following - the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` diff --git a/stf/stf-api-alternative/pytriton/examples/identity_python/__init__.py b/stf/stf-api-alternative/pytriton/examples/identity_python/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/identity_python/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/identity_python/client.py b/stf/stf-api-alternative/pytriton/examples/identity_python/client.py deleted file mode 100644 index 65081777b10ca181964598181eb3ecddb10892f8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/identity_python/client.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for identity_python sample server.""" -import logging -import random - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.identity_python.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -batch_size = 4 -input1_batch = [[random.random(), random.random(), random.random(), random.random()]] * batch_size -input2_batch = [[b"\xff\x00\x00\x00"]] * batch_size - -logger.info(f"INPUT_1: {input1_batch}") -logger.info(f"INPUT_2: {input2_batch}") - -input1_batch = np.array(input1_batch, dtype=np.float64) -input2_batch = np.array(input2_batch, dtype=object) # use dtype=object to avoid trimming of `\x00` bytes by numpy - -with ModelClient("localhost", "Identity") as client: - logger.info("Sending request") - result_dict = client.infer_batch(input1_batch, input2_batch) - logger.info(f"results: {result_dict}") - -for output_name, output_batch in result_dict.items(): - logger.info(f"{output_name}: {output_batch.tolist()}") diff --git a/stf/stf-api-alternative/pytriton/examples/identity_python/install.sh b/stf/stf-api-alternative/pytriton/examples/identity_python/install.sh deleted file mode 100644 index 73854bc2993639c6f6ee7387dcb8880c8cf10e84..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/identity_python/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -pip install numpy \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/identity_python/server.py b/stf/stf-api-alternative/pytriton/examples/identity_python/server.py deleted file mode 100644 index 30c1ac2a30075c418d78996b1101a4ebb975c223..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/identity_python/server.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Very simple example with python identity operation.""" -import logging - -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.identity_python.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -def _infer_raw_fn(inputs): # noqa: N803 - return [ - { - "OUTPUT_1": request["INPUT_1"], - "OUTPUT_2": request["INPUT_2"], - } - for request in inputs - ] - - -@batch -def _infer_fn(**inputs): # noqa: N803 - return { - "OUTPUT_1": inputs["INPUT_1"], - "OUTPUT_2": inputs["INPUT_2"], - } - - -with Triton() as triton: - logger.info("Loading Identity model.") - triton.bind( - model_name="Identity", - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.float64, shape=(-1,)), - Tensor(dtype=object, shape=(1,)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1,)), - Tensor(dtype=object, shape=(1,)), - ], - config=ModelConfig(max_batch_size=128), - strict=True, - ) - logger.info("Serving inference") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/linear_cupy/README.md b/stf/stf-api-alternative/pytriton/examples/linear_cupy/README.md deleted file mode 100644 index 8d2cf4958d382e23d369ad13c7cafdce031fd3ba..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_cupy/README.md +++ /dev/null @@ -1,52 +0,0 @@ - - -# Linear CuPy Model - -## Overview - -The example presents a simple Linear model using RAPIDS/CuPy library. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` diff --git a/stf/stf-api-alternative/pytriton/examples/linear_cupy/__init__.py b/stf/stf-api-alternative/pytriton/examples/linear_cupy/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_cupy/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/linear_cupy/client.py b/stf/stf-api-alternative/pytriton/examples/linear_cupy/client.py deleted file mode 100644 index 5d6a2677a5abfb1dd4cf6b4ac4b8e77daf402589..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_cupy/client.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for add_sub_python sample server.""" -import logging - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.linear_cupy.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -VECTOR_SIZE = 10 -BATCH_SIZE = 2 - -u_batch = np.ones((BATCH_SIZE, VECTOR_SIZE), dtype=np.float64) -v_batch = np.ones((BATCH_SIZE, VECTOR_SIZE), dtype=np.float64) - -logger.info(f"u: {u_batch.tolist()}") -logger.info(f"v: {v_batch.tolist()}") - -with ModelClient("localhost", "Linear") as client: - logger.info("Sending inference request") - result_batch = client.infer_batch(u_batch, v_batch) - -for output_name, data_batch in result_batch.items(): - logger.info(f"{output_name}: {data_batch.tolist()}") diff --git a/stf/stf-api-alternative/pytriton/examples/linear_cupy/install.sh b/stf/stf-api-alternative/pytriton/examples/linear_cupy/install.sh deleted file mode 100644 index ebfa68dd00de8768e78ecaecf50508f91c713c19..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_cupy/install.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
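For reference, the `Linear` model bound by this example's `server.py` computes `u * alpha + v + beta` with `alpha = 2` and `beta = arange(10)`, so the all-ones batches sent by `client.py` should come back as rows `[3, 4, ..., 12]`. A CPU-only NumPy sketch of that expectation (no CuPy or GPU required), useful for sanity-checking the server response:

```python
import numpy as np

VECTOR_SIZE = 10
BATCH_SIZE = 2

alpha = 2
beta = np.arange(VECTOR_SIZE)

u_batch = np.ones((BATCH_SIZE, VECTOR_SIZE), dtype=np.float64)
v_batch = np.ones((BATCH_SIZE, VECTOR_SIZE), dtype=np.float64)

# CPU reference for the computation the server performs on GPU with CuPy.
expected = u_batch * alpha + v_batch + beta
print(expected.tolist())  # each row is [3.0, 4.0, ..., 12.0]
```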
-set -xe - -pip install numpy -pip install cupy-cuda12x --extra-index-url=https://pypi.ngc.nvidia.com diff --git a/stf/stf-api-alternative/pytriton/examples/linear_cupy/server.py b/stf/stf-api-alternative/pytriton/examples/linear_cupy/server.py deleted file mode 100644 index d4702c1f0fb7c7e185da8e71dc88d08065303315..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_cupy/server.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Server with simple python model performing adding and subtract operation.""" -import logging - -import cupy as cp # pytype: disable=import-error -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -LOGGER = logging.getLogger("examples.linear_cupy.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -VECTOR_SIZE = 10 - - -class LinearModel: - def __init__(self): - self.alpha = 2 - self.beta = cp.arange(VECTOR_SIZE) - - @batch - def linear(self, **inputs): - u_batch, v_batch = inputs.values() - u_batch_cp, v_batch_cp = cp.asarray(u_batch), cp.asarray(v_batch) - lin = u_batch_cp * self.alpha + v_batch_cp + self.beta - return {"result": cp.asnumpy(lin)} - - -with Triton() as triton: - LOGGER.info("Loading linear model") - lin_model = LinearModel() - triton.bind( - model_name="Linear", - infer_func=lin_model.linear, - inputs=[ - Tensor(dtype=np.float64, shape=(VECTOR_SIZE,)), - Tensor(dtype=np.float64, shape=(VECTOR_SIZE,)), - ], - outputs=[ - Tensor(name="result", dtype=np.float64, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - strict=True, - ) - LOGGER.info("Serving model") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/README.md b/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/README.md deleted file mode 100644 index 2f6935f5baf3949daee4a9b285e2ddd60dd0e398..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/README.md +++ /dev/null @@ -1,36 +0,0 @@ - - -# Linear CuPy Model in Jupyter Notebook - -## Overview - -The example presents a simple Linear model in Jupyter Notebook using RAPIDS/CuPy library. - -Example consists of following scripts: - -- `linear.ipynb` - Jupyter Notebook file - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. 
In current terminal run jupyter notebook and open `linear.ipynb` file - -```shell -jupyter notebook -``` diff --git a/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/__init__.py b/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/linear.ipynb b/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/linear.ipynb deleted file mode 100644 index 18088cde19105d1df007ea7ee5ab6073aee846fe..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_cupy_notebook/linear.ipynb +++ /dev/null @@ -1,229 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Example of using Triton Server Wrapper with RAPIDS/CuPy library in Jupyter Notebook" - ] - }, - { - "cell_type": "markdown", - "source": [ - "### Pure Python/CuPy and Triton Wrapper equivalent of The RAPIDS-Triton Linear Example:\n", - " https://github.com/rapidsai/rapids-triton-linear-example#the-rapids-triton-linear-example\n", - " (Remark: Above example is focused on latency minimization - our equivalent is focused on easy of use)" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%% md\n" - } - } - }, - { - "cell_type": "markdown", - "source": [ - "## Triton server setup with custom linear model" - ], - "metadata": { - "collapsed": false - } - }, - { - "cell_type": "markdown", - "source": [ - "Install dependencies" - ], - "metadata": { - "collapsed": false - } - }, - { - "cell_type": "code", - "execution_count": null, - "outputs": [], - "source": [ - "import sys\n", - "!{sys.executable} -m pip install numpy" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n" - } - } - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Required imports:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "pycharm": { - "name": "#%%\n" - } - }, - "outputs": [], - "source": [ - "import numpy as np\n", - "import cupy as cp\n", - "\n", - "from pytriton.model_config import ModelConfig, Tensor\n", - "from pytriton.triton import Triton\n", - "from pytriton.decorators import batch" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Define linear model (for simplicity, sample model parameters are defined in class initializer):" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "VECTOR_SIZE = 10\n", - "\n", - "class LinearModel:\n", - " def __init__(self):\n", - " self.alpha = 2\n", - " self.beta = cp.arange(VECTOR_SIZE)\n", - "\n", - " @batch\n", - " def linear(self, 
**inputs):\n", - " u_batch, v_batch = inputs.values()\n", - " u_batch_cp, v_batch_cp = cp.asarray(u_batch), cp.asarray(v_batch)\n", - " lin = u_batch_cp * self.alpha + v_batch_cp + self.beta\n", - " return {\"lin\": cp.asnumpy(lin)}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Instantiate titon wrapper class and load model with defined callable:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton = Triton()\n", - "lin_model = LinearModel()\n", - "triton.bind(\n", - " model_name=\"Linear\",\n", - " infer_func=lin_model.linear,\n", - " inputs=[\n", - " Tensor(dtype=np.float64, shape=(VECTOR_SIZE,)),\n", - " Tensor(dtype=np.float64, shape=(VECTOR_SIZE,)),\n", - " ],\n", - " outputs=[\n", - " Tensor(name=\"lin\", dtype=np.float64, shape=(-1,)),\n", - " ],\n", - " config=ModelConfig(max_batch_size=128),\n", - " strict=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Run triton server with defined model inference callable" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton.run()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Example inference performed with ModelClient calling triton server" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from pytriton.client import ModelClient\n", - "\n", - "VECTOR_SIZE = 10\n", - "BATCH_SIZE = 2\n", - "\n", - "u_batch = np.ones((BATCH_SIZE, VECTOR_SIZE), dtype=np.float64)\n", - "v_batch = np.ones((BATCH_SIZE, VECTOR_SIZE), dtype=np.float64)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "with ModelClient(\"localhost\", \"Linear\") as client:\n", - " result_batch = client.infer_batch(u_batch, v_batch)\n", - "\n", - "for output_name, data_batch in result_batch.items():\n", - " print(f\"{output_name}: {data_batch.tolist()}\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Stop triton server at the end" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "triton.stop()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/README.md b/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/README.md deleted file mode 100644 index bccbec568a20174699c0b497c6d51c17d7d2d282..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/README.md +++ /dev/null @@ -1,62 +0,0 @@ - - -# Linear model in PyTorch - -## Overview - -The example presents a simple Linear model implemented in PyTorch - -Example consists of following scripts: - -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Requirements - -The example requires the `torch` package. 
It can be installed in your current environment using pip: - -```shell -pip install torch -``` - -Or you can use NVIDIA PyTorch container: -```shell -docker run -it --gpus 1 --shm-size 8gb -v {repository_path}:{repository_path} -w {repository_path} nvcr.io/nvidia/pytorch:23.10-py3 bash -``` - -If you select to use container we recommend to install -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -3. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -4. Go to the example directory -5. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` - diff --git a/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/client.py b/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/client.py deleted file mode 100644 index 954834d037a0f29d2ba6bc92dc1a487e1602f687..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/client.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for linear_random example.""" -import argparse -import logging - -import torch # pytype: disable=import-error - -from pytriton.client import ModelClient - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--url", - default="localhost", - help=( - "Url to Triton server (ex. grpc://localhost:8001)." 
- "HTTP protocol with default port is used if parameter is not provided" - ), - required=False, - ) - args = parser.parse_args() - logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - logger = logging.getLogger("examples.linear_random_pytorch.client") - - input1_batch = torch.randn(128, 20).cpu().detach().numpy() - - logger.info(f"Input: {input1_batch.tolist()}") - - with ModelClient(args.url, "Linear") as client: - logger.info("Sending request") - result_dict = client.infer_batch(input1_batch) - - for output_name, output_batch in result_dict.items(): - logger.info(f"{output_name}: {output_batch.tolist()}") - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/server.py b/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/server.py deleted file mode 100644 index a40221feb1094fab503be9cbc3fb810d7dd82689..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/linear_random_pytorch/server.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Example with single random Linear implemented with PyTorch framework.""" -import logging - -import numpy as np -import torch # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -DEVICE = "cuda" if torch.cuda.is_available() else "cpu" -MODEL = torch.nn.Linear(20, 30).to(DEVICE).eval() - - -@batch -def _infer_fn(**inputs): - (input1_batch,) = inputs.values() - input1_batch_tensor = torch.from_numpy(input1_batch).to(DEVICE) - output1_batch_tensor = MODEL(input1_batch_tensor) - output1_batch = output1_batch_tensor.cpu().detach().numpy() - return [output1_batch] - - -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") -logger = logging.getLogger("examples.linear_random_pytorch.server") - -with Triton() as triton: - logger.info("Loading Linear model.") - triton.bind( - model_name="Linear", - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - strict=True, - ) - logger.info("Serving models") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/README.md b/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/README.md deleted file mode 100644 index dfc61e3887c70eb9fbe6e0f4dcce8ce5abae9f3b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/README.md +++ /dev/null @@ -1,68 +0,0 @@ - - -# Multi-Layer TensorFlow2 Model - -## Overview - -The example presents a simple Multi-Layer model implemented in TensorFlow2 - -Example consists of following scripts: - -- `server.py` - start the model 
with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Requirements - -The example requires the `tensorflow` package. It can be installed in your current environment using pip: - -```shell -pip install tensorflow -``` - -Or you can use NVIDIA TensorFlow container: -```shell -docker run -it --gpus 1 --shm-size 8gb -v {repository_path}:{repository_path} -w {repository_path} nvcr.io/nvidia/tensorflow:23.10-tf2-py3 bash -``` - -If you select to use container we recommend to install -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` - diff --git a/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/__init__.py b/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/client.py b/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/client.py deleted file mode 100644 index e96e8f5f9a7d69185aa8c364de2a9600ba761892..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/client.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
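Since the Keras model defined in this example's `server.py` is just two pass-through `Lambda` layers, the deployed `MLP` model should return its input unchanged. Assuming the server is running on `localhost` and keeps the `output` tensor name it declares, a client-side sanity check could look like this:

```python
import numpy as np

from pytriton.client import ModelClient

batch_size = 16
images_batch = np.random.uniform(size=(batch_size, 224, 224, 3)).astype(np.float32)

with ModelClient("localhost", "MLP") as client:
    result_dict = client.infer_batch(images_batch)

# The model is Lambda(x: x) applied twice, so the round trip should be lossless.
assert np.allclose(result_dict["output"], images_batch)
```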
-"""Client for mlp_random example.""" -import logging - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.mlp_random_tensorflow2.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -batch_size = 16 -image_size = (224, 224, 3) -images_batch = np.random.uniform(size=(batch_size,) + image_size).astype(np.float32) - -logger.info(f"Input: {images_batch}") - -with ModelClient("localhost", "MLP") as client: - logger.info("Sending request") - result_dict = client.infer_batch(images_batch) - -logger.info(f"results: {result_dict}") diff --git a/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/server.py b/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/server.py deleted file mode 100644 index 2be9bebfeb29a2f97b0619ca742e9f031fe8db71..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/mlp_random_tensorflow2/server.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Example with random MLP implemented with TF2 framework.""" -import logging - -import numpy as np -import tensorflow as tf # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.mlp_random_tensorflow2.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -def _get_model(): # Load model into Triton Inference Server - input_layer = tf.keras.layers.Input((224, 224, 3)) - layer_output = tf.keras.layers.Lambda(lambda x: x)(input_layer) - model_output = tf.keras.layers.Lambda(lambda x: x)(layer_output) - model = tf.keras.Model(input_layer, model_output) - return model - - -MODEL = _get_model() - - -@batch -def _infer_fn(image): - images_batch_tensor = tf.convert_to_tensor(image) - output1_batch = MODEL.predict(images_batch_tensor) - return [output1_batch] - - -with Triton() as triton: - logger.info("Loading MLP model.") - triton.bind( - model_name="MLP", - infer_func=_infer_fn, - inputs=[ - Tensor(name="image", dtype=np.float32, shape=(224, 224, 3)), - ], - outputs=[ - Tensor(name="output", dtype=np.float32, shape=(224, 224, 3)), - ], - config=ModelConfig(max_batch_size=16), - strict=True, - ) - logger.info("Serving inference") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/README.md b/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/README.md deleted file mode 100644 index 0550fa945eac59458973bfd75de6c4670a250260..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/README.md +++ /dev/null @@ -1,68 +0,0 @@ - - -# Multi-Instance Deployment of ResNet50 PyTorch Model - -## Overview - -The 
example presents a deployment of Multi-Instance ResNet50 PyTorch model. The model is deployed multiple times what -improve the throughput of the model when GPU is underutilized. The model by default is deployed on same GPU twice. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies for downloading model from HuggingFace -- `server.py` - start the model with Triton Inference Server -- `client.sh` - execute Perf Analyzer to measure the performance - -## Requirements - -The example requires the `torch` package. It can be installed in your current environment using pip: - -```shell -pip install torch -``` - -Or you can use NVIDIA PyTorch container: -```shell -docker run -it --gpus 1 --shm-size 8gb -v {repository_path}:{repository_path} -w {repository_path} nvcr.io/nvidia/pytorch:23.10-py3 bash -``` - -If you select to use container we recommend to install -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.sh` to run performance measurement on model: -```shell -./client.sh -``` \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/client.sh b/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/client.sh deleted file mode 100644 index 0c7c384285a6b359cd9e0c8a72194eaa7ce7c9f7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/client.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -perf_analyzer -u 127.0.0.1:8000 \ - -i http \ - -m ResNet50 \ - --measurement-mode count_windows \ - --measurement-request-count 100 \ - --concurrency-range 64:64:64 \ - -v diff --git a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/install.sh b/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/install.sh deleted file mode 100644 index f5af9cd139aaf49f3a8bf7e9f5c939eda674ebcc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/install.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -apt-get update -apt-get install -y libb64-dev -pip install transformers nvidia-pytriton diff --git a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/server.py b/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/server.py deleted file mode 100644 index 1972c20d0fe8a37379aebab4f34229cbf02f7f8c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multi_instance_resnet50_pytorch/server.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
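This example ships only a `perf_analyzer` client for load testing; for a quick functional check the deployed model can also be queried from Python. A minimal sketch, assuming the server below is running locally and that a single random image is acceptable input (it will of course produce an arbitrary label):

```python
import numpy as np

from pytriton.client import ModelClient

# One random image in the NCHW layout and float16 dtype declared by the server.
image_batch = np.random.rand(1, 3, 224, 224).astype(np.float16)

with ModelClient("localhost", "ResNet50") as client:
    result_dict = client.infer_batch(image_batch)

# "label" is the output tensor bound by the server; values are UTF-8 encoded class names.
print(result_dict["label"])
```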
-import argparse -import logging -from typing import Any, List - -import numpy as np -import torch # pytype: disable=import-error -from transformers import ResNetForImageClassification # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.multi_instance_resnet50_pytorch.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -DEVICE = "cuda:0" - - -class _InferFuncWrapper: - def __init__(self, model: Any, device: str): - self._model = model - self._device = device - - @batch - def __call__(self, image: np.ndarray): - logger.debug(f"Image data: {image.shape} ({image.size})") - - image = torch.from_numpy(image).to(self._device) - with torch.inference_mode(): - logits = self._model(pixel_values=image).logits - logits = logits.to("cpu") - - labels = [] - for logit in logits: - predicted_label = logit.argmax(-1).item() - label = np.char.encode(self._model.config.id2label[predicted_label], "utf-8") - labels.append([label]) - - return {"label": np.array(labels)} - - -def _infer_function_factory(devices: List[str]): - infer_funcs = [] - for device in devices: - model = ResNetForImageClassification.from_pretrained("microsoft/resnet-50") - model = model.half().to(device).eval() - infer_funcs.append(_InferFuncWrapper(model=model, device=device)) - - return infer_funcs - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--max-batch-size", - type=int, - default=32, - help="Batch size of request.", - required=False, - ) - parser.add_argument( - "--number-of-instances", - type=int, - default=2, - help="Batch size of request.", - required=False, - ) - - parser.add_argument( - "--verbose", - action="store_true", - default=False, - ) - args = parser.parse_args() - - devices = [DEVICE] * args.number_of_instances - with Triton() as triton: - logger.info(f"Loading ResNet50 PyTorch model on devices: {devices}") - triton.bind( - model_name="ResNet50", - infer_func=_infer_function_factory(devices), - inputs=[ - Tensor( - name="image", - dtype=np.float16, - shape=(3, 224, 224), - ), - ], - outputs=[ - Tensor(name="label", dtype=bytes, shape=(1,)), - ], - config=ModelConfig( - max_batch_size=args.max_batch_size, - ), - strict=True, - ) - logger.info("Serving model") - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/README.md b/stf/stf-api-alternative/pytriton/examples/multiple_models_python/README.md deleted file mode 100644 index 69801a97f871bf4a9ff08d24e022692c3d7a42db..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/README.md +++ /dev/null @@ -1,53 +0,0 @@ - - -# Multi-Model Deployment of Python Model - -## Overview - -The example presents a deployment of multiple Python models at the same time. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following - the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. 
In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` diff --git a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/__init__.py b/stf/stf-api-alternative/pytriton/examples/multiple_models_python/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/client.py b/stf/stf-api-alternative/pytriton/examples/multiple_models_python/client.py deleted file mode 100644 index 291ef558a42517634ea92170eabbe93ea3a6cd1d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/client.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Example client script for multiple_models example.""" -import logging - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.multiple_models_python.client") - -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -batch_size = 2 -a_batch = np.ones((batch_size, 1), dtype=np.float32) - -logger.info(f"a: {a_batch.tolist()}") - -with ModelClient("localhost", "Multiply2") as client2: - with ModelClient("localhost", "Multiply4") as client4: - result2_batch = client2.infer_batch(a_batch) - result4_batch = client4.infer_batch(a_batch) - -for output_name, data_batch in result2_batch.items(): - logger.info(f"Multiply2/{output_name}: {data_batch.tolist()}") -for output_name, data_batch in result4_batch.items(): - logger.info(f"Multiply4/{output_name}: {data_batch.tolist()}") diff --git a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/install.sh b/stf/stf-api-alternative/pytriton/examples/multiple_models_python/install.sh deleted file mode 100644 index 73854bc2993639c6f6ee7387dcb8880c8cf10e84..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -pip install numpy \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/server.py b/stf/stf-api-alternative/pytriton/examples/multiple_models_python/server.py deleted file mode 100644 index 54955b550aae058ec8ff152c5bdbc5bbcb059e4f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/multiple_models_python/server.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Example with multiple models served on single Triton server.""" -import logging - -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.multiple_models_python.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -@batch -def _multiply2(multiplicand): - product = multiplicand * 2.0 - return [product] - - -@batch -def _multiply4(multiplicand): - product = multiplicand * 4.0 - return [product] - - -with Triton() as triton: - logger.info("Loading Multiply2 model") - triton.bind( - model_name="Multiply2", - infer_func=_multiply2, - inputs=[ - Tensor(name="multiplicand", dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="product", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=8), - strict=True, - ) - logger.info("Loading Multiply4 model") - triton.bind( - model_name="Multiply4", - infer_func=_multiply4, - inputs=[ - Tensor(name="multiplicand", dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="product", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=8), - strict=True, - ) - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/README.md b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/README.md deleted file mode 100644 index e8a8afdf741bf685c1ca153fd4c375fa3e84327d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/README.md +++ /dev/null @@ -1,461 +0,0 @@ - - -# NeMo Megatron GPT model deployment - -This example demonstrates how to deploy a NeMo Megatron GPT model using PyTriton. - - - - -- [Introduction](#introduction) -- [Requirements](#requirements) -- [Text Generation Task](#text-generation-task) - - [Running the Server](#running-the-server) - - [Running the Client](#running-the-client) -- [Prompt learning](#prompt-learning) - - [Training and evaluation of Prompt Learning model](#training-and-evaluation-of-prompt-learning-model) - - [Running the Server](#running-the-server-1) - - [Running the Client](#running-the-client-1) -- [Clusters deployment](#clusters-deployment) - - [Running server on Slurm cluster](#running-server-on-slurm-cluster) - - [Running server on Kubernetes cluster](#running-server-on-kubernetes-cluster) - - [Deployment instruction](#deployment-instruction) - - [Creating Persistent Volume Claim](#creating-persistent-volume-claim) - - [Using CSI host path](#using-csi-host-path) - - [Using NFS disk](#using-nfs-disk) - - - -## Introduction - -[NVIDIA NeMo Megatron](https://developer.nvidia.com/nemo/megatron) is an end-to-end framework for training and deploying large language models (LLMs) with billions or trillions of parameters. - -## Requirements - -The workstation on which you will run the server script should meet the requirements defined in the model documentation. For example, see the documentation for the default model in this example - [NeMo Megatron GPT 1.3B](https://huggingface.co/nvidia/nemo-megatron-gpt-1.3B). - -The easiest way to run these examples is in an [NVIDIA NeMo Docker container](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/nemo) -where the environment is set up out-of-the-box. 
Alternatively, you can set up your environment manually, as described in the [model instructions](https://huggingface.co/nvidia/nemo-megatron-gpt-1.3B#step-1-install-nemo-and-dependencies). - -If you choose to use the container, we recommend installing the -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -## Text Generation Task - -### Running the Server - -1. Run the NVIDIA NeMo Docker container: - - ```bash - cd - docker run \ - --rm -it \ - --gpus all --shm-size 2G \ - -v $PWD:$PWD -w $PWD \ - -p 8000:8000 -p 8001:8001 -p 8002:8002 \ - --name nemo_megatron_gpt_server \ - nvcr.io/nvidia/nemo:23.06 bash - ``` - - For more information on Docker or cluster deployments, see the [documentation](../../docs/deploying_in_clusters.md). - -2. In the recently started container, [install PyTriton](../../docs/installation.md): - - ```bash - pip install -U nvidia-pytriton - ``` - -3. Start the NeMo Megatron GPT model: - - ```bash - ./examples/nemo_megatron_gpt_multinode/server.py - ``` - - By default, NeMo Megatron uses all available GPUs, distributing the model with increased data parallel size if possible. The tensor model parallel size (TP) and pipeline model parallel size (PP) remain the same as during the training of the loaded model. - For example, for NeMo Megatron GPT 20B with TP=4 and PP=1, inference on a workstation with 8 GPUs will use 2 copies of the model (data parallelism size = 2). - - To run NeMo Megatron GPT model inference on a specific number of GPUs, execute the following commands in your Python environment: - - ```bash - ./examples/nemo_megatron_gpt_multinode/server.py --gpus 2 # uses first 2 GPUs - ./examples/nemo_megatron_gpt_multinode/server.py --gpus 2,3 # uses 2 GPUs with IDs 2 and 3 - ./examples/nemo_megatron_gpt_multinode/server.py --gpus -1 # uses all available GPUs - ``` - - For TP and PP model parameters, refer to the documentation of the model you are using. - - To select a NeMo Megatron repository on HuggingFace Hub, use the `--model-repo-id` parameter. - - ```bash - ./examples/nemo_megatron_gpt_multinode/server.py --model-repo-id nvidia/nemo-megatron-gpt-20B - ``` - -The server script will: - -1. Ensure the model and tokenizer data are downloaded. -2. Load the downloaded model and initialize the environment for it. -3. Start the PyTriton server, on listening on [configured ports](../../docs/initialization.md). - - In this example, the HTTP endpoint will listen on port 8000. Due to [Docker port publishing](https://docs.docker.com/config/containers/container-networking/#published-ports), it should be accessible outside of the container. - -### Running the Client - -The client implements a simple text generation task. - -You can run the client script in any Python environment with network access to the NVIDIA Triton server endpoints. - -The server script logs contain a `Server http url http://:` line, which defines the hostname and port to which the Triton Inference Server HTTP endpoint is bound. When running the server in a Docker container, the URL might contain the Docker container ID in place of the hostname. Such a URL is unusable outside of the container. 
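If you want to call the endpoint directly from Python rather than through the bundled `client.py` (shell invocations of which are shown below), a minimal sketch follows. The model name `"GPT"` is a placeholder, and the exact set of input/output tensors is defined by this example's `server.py`; the bytes encoding of prompts mirrors the other text examples in this repository.

```python
import numpy as np

from pytriton.client import ModelClient

# Prompts are sent as UTF-8 bytes with dtype=object, as in the other text examples.
prompts = np.array([[b"Thank you for"], [b"Q: Are you going for a lunch?"]], dtype=object)

# "GPT" is a placeholder model name; use the name bound by server.py in this example.
with ModelClient("http://localhost:8000", "GPT") as client:
    result_dict = client.infer_batch(prompts)

for output_name, output_batch in result_dict.items():
    print(output_name, output_batch.tolist())
```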
- -Example client calls: - -- Run it in the same container as the server: - - ```bash - docker exec nemo_megatron_gpt_server ./examples/nemo_megatron_gpt_multinode/client.py --url http://localhost:8000 - ``` - -- Run it in a separate container: - - ```bash - cd - docker run --rm -it -v $PWD:$PWD -w $PWD --link nemo_megatron_gpt_server nvcr.io/nvidia/pytorch:23.06-py3 bash - # now inside obtained container - pip install -U nvidia-pytriton - ./examples/nemo_megatron_gpt_multinode/client.py --url http://nemo_megatron_gpt_server:8000 - ``` - -- Run it locally: - - ```bash - # setup python virtualenv if needed - pip install virtualenv - virtualenv -p $(which python3.8) .venv - source .venv/bin/activate - # and install pytriton - pip install -U nvidia-pytriton - # run client - # thanks to docker port publishing it is available outside of docker - ./examples/nemo_megatron_gpt_multinode/client.py --url http://localhost:8000 - ``` - -To prepare custom prompts, use the `--prompts` argument: - -```shell -./examples/nemo_megatron_gpt_multinode/client.py --prompts "Thank you for" "Q: Are you going for a lunch?" -``` - -As a result, the client prints sequences generated by the model based on the prompts sent in the request. - -``` -> ./examples/nemo_megatron_gpt_multinode/client.py --url http://nemo_megatron_gpt_server:8000 --prompts "Thank you for" "Q: Are you going for a lunch?" -================================ -Thank you for the book. I have a question about Sissie in particular because we were supposed to watch Rocky. She didn't do anything like that, so now I'm wondering if she -================================ -Q: Are you going for a lunch? -Lunch is typically served cold but I'm interested in ordering this dessert which, of course, doesn't come up as well on menus online. -``` - -## Prompt learning - -Prompt learning is an efficient method for adapting pretrained large language models (LLMs) to various tasks without tuning the entire set of parameters. This approach helps prevent catastrophic forgetting issues that may occur during fine-tuning. - -For more information, refer to the [NeMo prompt learning documentation -](https://docs.nvidia.com/deeplearning/nemo/user-guide/docs/en/main/nlp/nemo_megatron/prompt_learning.html). - -### Training and evaluation of Prompt Learning model - -To serve a prompt learning model with PyTriton, you need both the pretrained LLM and the prompt learning model. You can find an example of preparing a prompt learning model in the [NeMo prompt learning tutorial notebook](https://github.com/NVIDIA/NeMo/blob/stable/tutorials/nlp/Multitask_Prompt_and_PTuning.ipynb) and the above documentation. - -Use the `train_prompt_learning_model.sh` script to quickly prepare a prompt learning model. This script trains a model for sentiment, intent, and slot filling tasks and saves it as `sentiment_intent_slot_p_tuning.nemo`. 
- -```shell -REPO_ID=nvidia/nemo-megatron-gpt-1.3B -MODEL_FILENAME=nemo_gpt1.3B_fp16.nemo -./examples/nemo_megatron_gpt_multinode/train_prompt_learning_model.sh -``` - -Next, run the NeMo evaluation script to test the prompt learning model: - -```shell -NEMO_REPO_DIR="/workspace/nemo" -LANGUAGE_MODEL_PATH=$(python -c "import huggingface_hub; print(huggingface_hub.try_to_load_from_cache(\"${REPO_ID}\", \"${MODEL_FILENAME}\"))") -DATASETS_DIR=${DATASETS_DIR:-$PWD/datasets} -SENTIMENT_DIR=${DATASETS_DIR}/sentiment -ASSISTANT_DIR=${DATASETS_DIR}/assistant -python3 ${NEMO_REPO_DIR}/examples/nlp/language_modeling/megatron_gpt_prompt_learning_eval.py \ - virtual_prompt_model_file=sentiment_intent_slot_p_tuning.nemo \ - gpt_model_file=${LANGUAGE_MODEL_PATH} \ - inference.greedy=True \ - inference.add_BOS=False \ - trainer.devices=1 \ - trainer.num_nodes=1 \ - tensor_model_parallel_size=1 \ - pipeline_model_parallel_size=1 \ - data_paths=["${SENTIMENT_DIR}/FinancialPhraseBank-v1.0/financial_phrase_bank_test.jsonl","${ASSISTANT_DIR}/assistant_test.jsonl"] \ - pred_file_path=sentiment_intent_slot_p_tuning_test_predictions.jsonl -``` - -This generates a `sentiment_intent_slot_p_tuning_test_predictions.jsonl` file containing predictions for both tasks. - -### Running the Server - -You can now run the PyTriton server with the prompt learning model: - -```shell -./examples/nemo_megatron_gpt_multinode/server.py \ - --model-repo-id ${REPO_ID} \ - --model-filename ${MODEL_FILENAME} \ - --prompt-model-path sentiment_intent_slot_p_tuning.nemo -``` - -### Running the Client - -Can reuse the same client script as for the pretrained model: - -```shell -./examples/nemo_megatron_gpt_multinode/client.py --prompts "Thank you for" "Q: Are you going for a lunch?" -``` - -As a result, the client prints sequences generated by the model based on the prompts sent in the request. - -``` -> ./examples/nemo_megatron_gpt_multinode/client.py --url http://nemo_megatron_gpt_server:8000 --prompts "Thank you for" "Q: Are you going for a lunch?" -================================ -Thank you for the book. I have a question about Sissie in particular because we were supposed to watch Rocky. She didn't do anything like that, so now I'm wondering if she -================================ -Q: Are you going for a lunch? -Lunch is typically served cold but I'm interested in ordering this dessert which, of course, doesn't come up as well on menus online. -``` - -## Clusters deployment - -### Running server on Slurm cluster - -This example supports also scenario where model inference is performed in a multiple nodes, multiple gpus scenario. -For that we can use [Slurm](https://slurm.schedmd.com/) cluster management system. - -1. Prepare Slurm submission file. 
Example Slurm `sbatch` submission file: - - ```shell - #!/usr/bin/env bash - - #SBATCH --job-name=nemo_megatron_gpt - #SBATCH --nodes=2 - #SBATCH --ntasks-per-node=8 - #SBATCH --open-mode=append - #SBATCH --output=slurm_job-%x-%J.out - #SBATCH --partition= - #SBATCH --time=2:00:00 - - set -x - - # assume that your current working directory is PyTriton repository - # use github.com/nvidia/pyxis plugin - srun --output slurm_job-%x-%J.out \ - --container-name nemo_megatron_gpt_container \ - --container-image nvcr.io/nvidia/nemo:23.06 \ - --container-mounts "${PWD}":"${PWD}" \ - --container-workdir "${PWD}" \ - --no-container-mount-home \ - --unbuffered \ - bash -c '[[ ${LOCAL_RANK} -eq 0 ]] && pip install -U nvidia-pytriton || true' - # reuse of container prepared in above srun - srun --output slurm_job-%x-%J.out \ - --container-name nemo_megatron_gpt_container \ - --container-mounts "${PWD}":"${PWD}" \ - --container-workdir "${PWD}" \ - --no-container-mount-home \ - --unbuffered \ - bash -c "./examples/nemo_megatron_gpt_multinode/server.py --gpus 8 --nodes 2 --model-repo-id nvidia/nemo-megatron-gpt-20B" - ``` - - Parameters values requirements: - - - Number of tasks have to be equal to the product of `--nodes` and `--gpus`. - - Number of nodes on which script is run have to be equal `--nodes` parameter. - - There might be need to add/modify `sbatch` and `srun` commands parameters - refer to documentation of your cluster for more information. - -2. Submit job and observe logs in defined log paths: - - ```shell - sbatch - ``` - -For multi-node scenario you can run client: - -- locally as in single node scenario with url pointing cluster workstation -- on cluster in same container as server: - - ```shell - # attach to container - srun --pty \ - --jobid \ - --container-name \ - --container-mounts "${PWD}:${PWD}" \ - --container-workdir "${PWD}" \ - --no-container-mount-home \ - bash - # in just attached container - ./examples/nemo_megatron_gpt_multinode/client.py --url http://:8000 - ``` - -- on cluster in new container: - - ```shell - # start new container - srun --pty \ - --partition \ - --container-image nvcr.io/nvidia/nemo:23.06 \ - --container-mounts "${PWD}:${PWD}" \ - --container-workdir "${PWD}" \ - --no-container-mount-home \ - bash - # in newly created container install PyTriton and execute - pip install -U nvidia-pytriton - ./examples/nemo_megatron_gpt_multinode/client.py --url http://:8000 - ``` - - There might be need to add/modify `srun` commands parameters - refer to documentation of your cluster for more information. - -### Running server on Kubernetes cluster - -The following prerequisites must be matched to run the example: - -- Kubernetes cluster with NVIDIA GPU node -- [NVIDIA Device Plugin](https://github.com/NVIDIA/k8s-device-plugin) installed in Kubernetes cluster -- Docker Containers Registry accessible from Kubernetes cluster -- [Installed Helm](https://helm.sh/docs/intro/install/) for creating the deployment and test job - -Optionally you may install NVIDIA Container Toolkit and NVIDIA GPU Operator which enable more features -like [MIG](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-operator-mig.html) or -[Time Slicing](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-sharing.html) support in the cluster. 
- -### Running server on Kubernetes cluster - -The following prerequisites must be met to run the example: - -- Kubernetes cluster with an NVIDIA GPU node -- [NVIDIA Device Plugin](https://github.com/NVIDIA/k8s-device-plugin) installed in the Kubernetes cluster -- Docker container registry accessible from the Kubernetes cluster -- [Installed Helm](https://helm.sh/docs/intro/install/) for creating the deployment and test job - -Optionally, you may install the NVIDIA Container Toolkit and NVIDIA GPU Operator, which enable more features -like [MIG](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-operator-mig.html) or -[Time Slicing](https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/gpu-sharing.html) support in the cluster. -To learn more about how to set up a Kubernetes cluster with NVIDIA GPUs, you can review the [NVIDIA Cloud Native Documentation](https://docs.nvidia.com/datacenter/cloud-native/contents.html). - -#### Deployment instructions - -Below, we present a step-by-step guide assuming that **all the commands are executed from the root of the repository**. - -Follow these steps to run and test the example in the cluster: - -1. [Optional] Build the PyTriton wheel following the [build instructions](../../docs/building.md). -2. Prepare the tag under which the image is going to be pushed to your Docker container registry accessible from the Kubernetes cluster. Example for a local cluster (minikube, k3s) with the registry hosted inside the cluster: - -```shell -export DOCKER_IMAGE_NAME_WITH_TAG=localhost:5000/nemo-example:latest -``` - -3. Build and push the Docker container image to your registry: - -```shell -# Export the base image used for build -export FROM_IMAGE_NAME=nvcr.io/nvidia/nemo:23.06 -./examples/nemo_megatron_gpt_multinode/kubernetes/build_and_push.sh -``` - -**Note**: By default, the container is built using the `pytriton` package from pypi.org. To build the container with a locally built wheel, use `export BUILD_FROM=dist` before executing the script. - -4. At this point, there are two options to deploy the model, depending on the size of the model: - a) Install the Helm Chart with deployment and service for single-node: - -```shell -helm upgrade -i --set deployment.image=${DOCKER_IMAGE_NAME_WITH_TAG} \ ---set deployment.numOfGPUs=1 \ -nemo-example \ -./examples/nemo_megatron_gpt_multinode/kubernetes/single-node -``` - - b) Install the Helm Chart with deployment and service for multi-node: - -**Important**: Running multi-node requires creating a Persistent Volume Claim in the cluster, shared between PODs. You can pass its name as an argument to the Helm Chart during installation. Read more about how to create a [Persistent Volume Claim](#creating-persistent-volume-claim). - -**Please note**: The multi-node deployment for scaling requires improved configuration of services and load balancing. - -```shell -helm upgrade -i --set statefulset.image=${DOCKER_IMAGE_NAME_WITH_TAG} \ ---set statefulset.persistentVolumeClaim=llm-cache-pvc \ ---set statefulset.numOfNodes=3 \ ---set statefulset.numOfGPUs=1 \ -nemo-example \ -./examples/nemo_megatron_gpt_multinode/kubernetes/multi-node -``` - -5. Install the Helm Chart with the client test: - -```shell -helm install --set image=${DOCKER_IMAGE_NAME_WITH_TAG} \ -nemo-example-test \ -./examples/nemo_megatron_gpt_multinode/kubernetes/test -``` - -Now, you can review the logs from the running PODs to verify that inference is running. To show the logs from the cluster for a given POD, first list all running pods: - -```shell -kubectl get pods -``` - -Next, show the logs from the server or the client: - -```shell -kubectl logs {NAME} -``` - -To remove the installed charts, simply run: - -```shell -helm uninstall nemo-example-test -helm uninstall nemo-example -``` - -#### Creating Persistent Volume Claim - -This section describes how to create a Persistent Volume Claim in a Kubernetes cluster using a CSI or NFS drive. - -##### Using CSI host path - -When you are running on a local machine (e.g., Minikube or k3s), you can use the CSI host path to create a persistent volume claim.
Make sure that the appropriate extension for your cluster has been installed, and run: - -```shell -kubectl apply -f ./examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-csi.yaml -``` - -##### Using NFS disk - -When you are running a Kubernetes cluster in a Cloud Service Provider, you can create a persistent volume claim using an NFS disk. - -First, create the NFS disk and obtain its IP address. Make sure the disk is in the same network as the Kubernetes cluster. -The pre-defined file share name for the NFS storage is `llm`. - -Next, modify the `./examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-nfs.yaml` file and update the -`{IP}` value. Then run: - -```shell -kubectl apply -f ./examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-nfs.yaml -``` - -Once the persistent volume is ready, the claim can be created using: - -```shell -kubectl apply -f ./examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-nfs.yaml -``` diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/__init__.py b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/client.py b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/client.py deleted file mode 100644 index 8178ad9647334e1e7643407f97c9496758188570..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/client.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for the NeMo Megatron GPT multinode example server.""" -import argparse -import logging -import typing - -import numpy as np - -from pytriton.client import ModelClient - -_AVAILABLE_TASKS = ["sentiment", "intent_and_slot", "text_generation"] -_TASK_SEP = "|" - - -def _parse_prompts(prompts_list) -> typing.List[typing.Tuple[str, str]]: - """ - Parse prompts in the format of '[<task_name>|]<prompt>'. - Available tasks: {', '.join(_AVAILABLE_TASKS)}.
If you don't specify a task name, the model will default to text generation. - """ - - def _parse_prompt(prompt_str: str) -> typing.Tuple[str, str]: - if _TASK_SEP in prompt_str: - task_name, value = prompt_str.split(_TASK_SEP, 1) - task_name = task_name.strip().lower() - else: - task_name = "text_generation" - value = prompt_str.strip() - return task_name, value - - return [_parse_prompt(prompt_str) for prompt_str in prompts_list] - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--url", - default="localhost", - help=( - "Url to Triton server (ex. grpc://localhost:8000)." - "HTTP protocol with default port is used if parameter is not provided" - ), - ) - parser.add_argument( - "--init-timeout-s", - type=float, - default=600.0, - help="Server and model ready state timeout in seconds", - ) - parser.add_argument( - "--output-len", - type=int, - default=30, - help="Number of output tokens", - ) - parser.add_argument( - "--prompts", - default=[ - "Q: How are you?", - "Q: How big is the universe?", - f"sentiment{_TASK_SEP}It estimates the operating profit to further improve from the third quarter.", - f"intent_and_slot{_TASK_SEP}What is the weather like today?", - ], - nargs="+", - help=( - f"Prompts should be in the format of '[{_TASK_SEP}]'. " - f"Available tasks: {', '.join(_AVAILABLE_TASKS)}. " - "If you don't specify a task name, the model will default to text generation." - ), - ) - parser.add_argument( - "--verbose", - default=False, - action="store_true", - help="Enable verbose logging", - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - logger = logging.getLogger("nemo.client") - - tasks_and_prompts = _parse_prompts(args.prompts) - tasks, prompts = tuple(zip(*tasks_and_prompts)) - if not all(task in _AVAILABLE_TASKS for task in tasks): - raise ValueError(f"Unknown tasks: {set(tasks) - set(_AVAILABLE_TASKS)}") - - batch_size = len(args.prompts) - - def _str_list2numpy(str_list: typing.List[str]) -> np.ndarray: - str_ndarray = np.array(str_list)[..., np.newaxis] - return np.char.encode(str_ndarray, "utf-8") - - tasks = _str_list2numpy(tasks) - prompts = _str_list2numpy(prompts) - - def _param(dtype, value): - if bool(value): - return np.ones((batch_size, 1), dtype=dtype) * value - else: - return np.zeros((batch_size, 1), dtype=dtype) - - logger.info("================================") - logger.info("Preparing the client") - with ModelClient(args.url, "GPT", init_timeout_s=args.init_timeout_s) as client: - logger.info("================================") - logger.info("Sent batch for inference:") - - result_dict = client.infer_batch( - tasks=tasks, - prompts=prompts, - min_length=_param(np.int32, 20), - max_length=_param(np.int32, args.output_len), - use_greedy=_param(np.bool_, True), - temperature=_param(np.float32, 1.0), - top_k=_param(np.int32, 0), - top_p=_param(np.float32, 1.0), - repetition_penalty=_param(np.float32, 1.0), - add_BOS=_param(np.bool_, True), - all_probs=_param(np.bool_, False), - compute_logprob=_param(np.bool_, False), - ) - - sentences = np.char.decode(result_dict["sentences"].astype("bytes"), "utf-8") - sentences = np.squeeze(sentences, axis=-1) - for sentence in sentences: - logger.info("================================") - logger.info(sentence) - - -if __name__ == "__main__": - main() diff --git 
a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/gpt.py b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/gpt.py deleted file mode 100644 index aa4cc97809e9534083c0c155b86f0072765ffdc9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/gpt.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import typing - -import numpy as np -import torch # pytype: disable=import-error -from nemo.collections.nlp.modules.common.transformer.text_generation import ( # pytype: disable=import-error - LengthParam, - OutputType, - SamplingParam, -) - -from pytriton.decorators import ConstantPadder, batch, first_value, group_by_values -from pytriton.exceptions import PyTritonInvalidOperationError, PyTritonUnrecoverableError -from pytriton.model_config import Tensor - -from helpers import cast_output, typedict2tensor # pytype: disable=import-error # isort:skip - - -_INPUT_PARAMETERS_NAMES = list(typing.get_type_hints(LengthParam)) + list(typing.get_type_hints(SamplingParam)) - - -class NemoGptCallable: - def __init__(self, *, model_name: str, model): - self.model_name = model_name - self._model = model.cuda() - self._is_prompt_learning_model = hasattr(model, "virtual_prompt_style") - self._text_generate_fn = ( - self._model.frozen_model.generate if self._is_prompt_learning_model else self._model.generate - ) - self._task_generate_fn = self._model.generate if self._is_prompt_learning_model else None - self.inputs = ( - ( - Tensor(name="tasks", shape=(1,), dtype=bytes), - Tensor(name="prompts", shape=(1,), dtype=bytes), - ) - + typedict2tensor(LengthParam, overwrite_kwargs={"optional": True}, defaults=None) - + typedict2tensor(SamplingParam, overwrite_kwargs={"optional": True}, defaults=None) - ) - self.outputs = typedict2tensor(OutputType) - self._outputs_dict = {output.name: output for output in self.outputs} - - def _format_prompts( - self, tasks: typing.List[str], prompts: typing.List[str] - ) -> typing.List[typing.Union[str, typing.Dict[str, str]]]: - formatted_prompts = [] - for task_name, prompt in zip(tasks, prompts): - task_template = self._model.task_templates[task_name] - formatted_prompts.append( - { - **{"taskname": task_name}, - **dict(zip(task_template["prompt_template_fields"], [prompt])), - } - ) - return formatted_prompts - - @batch - @group_by_values("tasks", *_INPUT_PARAMETERS_NAMES, pad_fn=ConstantPadder(0)) - @first_value(*_INPUT_PARAMETERS_NAMES, strict=False) - def infer(self, **inputs: np.ndarray) -> typing.Dict[str, np.ndarray]: - # Tell other ranks we're doing generate - generate_num = 0 - choice = torch.cuda.LongTensor([generate_num]) - torch.distributed.broadcast(choice, 0) - - def _str_ndarray2list(str_ndarray: np.ndarray) -> typing.List[str]: - str_ndarray = str_ndarray.astype("bytes") - str_ndarray = np.char.decode(str_ndarray, encoding="utf-8") - str_ndarray = 
str_ndarray.squeeze(axis=-1) - return str_ndarray.tolist() - - tasks = _str_ndarray2list(inputs.pop("tasks")) - prompts = _str_ndarray2list(inputs.pop("prompts")) - length_params = LengthParam(**{k: v for k, v in inputs.items() if k in typing.get_type_hints(LengthParam)}) - sampling_params = SamplingParam( - **{k: v for k, v in inputs.items() if k in typing.get_type_hints(SamplingParam)} - ) - if tasks[0] == "text_generation": - generate_fn = self._text_generate_fn - else: - generate_fn = self._task_generate_fn - if generate_fn is None: - raise PyTritonInvalidOperationError( - f"Model {self.model_name} does not support task {tasks[0]}. " - "Only text_generation task is supported." - ) - prompts = self._format_prompts(tasks, prompts) - - try: - output: OutputType = generate_fn( - inputs=prompts, - length_params=length_params, - sampling_params=sampling_params, - ) - except RuntimeError as e: - raise PyTritonUnrecoverableError("Fatal error occurred - no further inferences possible.") from e - - output = { - output_name: cast_output(data, self._outputs_dict[output_name].dtype) - for output_name, data in output.items() - } - return output diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/helpers.py b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/helpers.py deleted file mode 100644 index 3f95e6d2bc87e14b39934d085bf8518a48f8ed5c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/helpers.py +++ /dev/null @@ -1,272 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import importlib -import logging -import pathlib -import socket -import typing -import warnings -from typing import Dict, Tuple, Type, Union - -import filelock -import huggingface_hub # pytype: disable=import-error -import numpy as np -import omegaconf # pytype: disable=import-error -import torch # pytype: disable=import-error -from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import ( # pytype: disable=import-error - MegatronGPTModel, -) -from nemo.collections.nlp.models.language_modeling.megatron_gpt_prompt_learning_model import ( # pytype: disable=import-error - MegatronGPTPromptLearningModel, -) -from nemo.collections.nlp.models.language_modeling.megatron_t5_model import ( # pytype: disable=import-error - MegatronT5Model, -) -from nemo.collections.nlp.models.language_modeling.megatron_t5_prompt_learning_model import ( # pytype: disable=import-error - MegatronT5PromptLearningModel, -) -from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector # pytype: disable=import-error -from nemo.utils.app_state import AppState # pytype: disable=import-error - -from pytriton.exceptions import PyTritonBadParameterError -from pytriton.model_config import Tensor - -LOGGER = logging.getLogger("nemo.helpers") - - -def cast_output(data, required_dtype): - if isinstance(data, torch.Tensor): - data = data.cpu().numpy() - elif not isinstance(data, np.ndarray): - data = np.array(data) - - data_is_str = required_dtype in (object, np.object_, bytes, np.bytes_) - if data_is_str: - data = np.char.encode(data, "utf-8") - - if data.ndim < 2: - data = data[..., np.newaxis] - return data.astype(required_dtype) - - -def typedict2tensor( - typedict_class, - overwrite_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = None, - defaults: typing.Optional[typing.Dict[str, typing.Any]] = None, -): - def _map_type(type_): - if type_ is int: - return np.int32 - elif type_ is float: - return np.float32 - elif type_ is bool: - return np.bool_ - elif type_ is str: - return bytes - else: - raise PyTritonBadParameterError(f"Unknown type {type_}") - - def _get_tensor_params(type_: Type) -> Dict[str, Union[Tuple[int, ...], type]]: - """ - Returns a shape and a type of Triton tensor. The shape and the type are inferred from a - Python typing. - - Args: - type_: a Python typing which should be a single type or a nested ``List``. If `type_` is a usual - type, then shape is ``(1,)``. If ``type_`` is a nested ``List``, then ``-1`` is added for each - ``List``. E.g., ``List[int]`` -> ``(1, -1)``, ``List[List[int]]`` -> ``(1, -1, -1)``. Additional - Please note that all shapes have additional ``(1,)`` leading dimension. - - Returns: - a dictionary with 2 elements: ``"shape"`` and ``"type"``. ``"type"`` is a numpy type which corresponds - to ``type_``. 
- """ - count = 0 - while typing.get_origin(type_) is list: - type_ = typing.get_args(type_)[0] - count += 1 - shape = (1,) + (-1,) * count - return {"shape": shape, "dtype": _map_type(type_)} - - overwrite_kwargs = overwrite_kwargs or {} - return tuple( - Tensor(name=name, **_get_tensor_params(type_), **overwrite_kwargs) - for name, type_ in typing.get_type_hints(typedict_class).items() - ) - - -def download_hf_model(repo_id: str, filename: typing.Optional[str] = None) -> pathlib.Path: - hf_cache_dir = pathlib.Path(huggingface_hub.constants.HUGGINGFACE_HUB_CACHE) - lock_dir = hf_cache_dir / huggingface_hub.file_download.repo_folder_name(repo_id=repo_id, repo_type="models") - filename = filename or _get_first_nemo_filename(repo_id) - - lock_path = lock_dir / f"{filename}.lock" - lock_path.parent.mkdir(parents=True, exist_ok=True) - LOGGER.info(f"Lock file {lock_path}") - lock = filelock.FileLock(lock_path) - - with lock: - LOGGER.info(f"Downloading model from https://huggingface.co/{repo_id} filename={filename}") - model_path = huggingface_hub.hf_hub_download(repo_id, filename=filename) # set $HF_HOME to set cache dir - return pathlib.Path(model_path) - - -def _get_first_nemo_filename(repo_id: str) -> str: - client = huggingface_hub.HfApi() - repo_files = client.list_repo_files(repo_id, revision="main") - nemo_files = [f for f in repo_files if f.endswith(".nemo")] - if len(nemo_files) == 0: - raise ValueError(f"Could not find .nemo file in {repo_id}") - filename = nemo_files[0] - if len(nemo_files) > 1: - warnings.warn( - f"Found more than one .nemo file in {repo_id}. Will be using {filename}. Use --repo-filename to specify the exact file name to use.", - stacklevel=1, - ) - return filename - - -def _get_worker_name() -> str: - worker_name = socket.gethostname() - app_state = AppState() - strategy_initialized = app_state.world_size is not None - if strategy_initialized: - worker_name = ( - f"{worker_name}:local={app_state.local_rank},global={app_state.global_rank},dev={app_state.device_id}" - ) - - return worker_name - - -def _patch_pretrained_cfg(pretrained_cfg, trainer): - omegaconf.OmegaConf.set_struct(pretrained_cfg, True) - with omegaconf.open_dict(pretrained_cfg): - attributes_to_update = { - "sequence_parallel": False, - "activations_checkpoint_granularity": None, - "activations_checkpoint_method": None, - "precision": trainer.precision, - } - for name, value in attributes_to_update.items(): - if hasattr(pretrained_cfg, name): - pretrained_cfg[name] = value - return pretrained_cfg - - -def _patch_prompt_learning_cfg( - prompt_learning_cfg: omegaconf.DictConfig, pretrained_cfg: omegaconf.DictConfig, pretrained_model_path: pathlib.Path -) -> omegaconf.DictConfig: - # NeMo prompt learning models doesn't contain target - # thus need to run define target based on target of pretrained model - target = { - _get_target_from_class(MegatronGPTModel): _get_target_from_class(MegatronGPTPromptLearningModel), - _get_target_from_class(MegatronT5Model): _get_target_from_class(MegatronT5PromptLearningModel), - }[pretrained_cfg.target] - - # use nemo archive here - pretrained model will be unpacked 2nd time to temporary dir - with omegaconf.open_dict(prompt_learning_cfg): - prompt_learning_cfg.language_model_path = pretrained_model_path.as_posix() - prompt_learning_cfg.target = target - - return prompt_learning_cfg - - -def _get_target_from_class(target_class) -> str: - return f"{target_class.__module__}.{target_class.__name__}" - - -def load_model( - model_path: pathlib.Path, trainer, *, 
prompt_learning_model_path: typing.Optional[pathlib.Path] = None -) -> torch.nn.Module: - worker_name = _get_worker_name() - LOGGER.debug(f"Loading {model_path} on {worker_name}") - - save_restore_connector = NLPSaveRestoreConnector() - if model_path.is_dir(): - save_restore_connector.model_extracted_dir = model_path.as_posix() - pretrained_cfg = save_restore_connector.restore_from( - None, model_path.as_posix(), return_config=True, trainer=trainer - ) - if not hasattr(pretrained_cfg, "target"): - pretrained_cfg["target"] = _get_target_from_class(MegatronGPTModel) - - if prompt_learning_model_path is not None: - prompt_learning_model_path = pathlib.Path(prompt_learning_model_path) - save_restore_connector = NLPSaveRestoreConnector() - prompt_learning_cfg = save_restore_connector.restore_from( - None, prompt_learning_model_path.as_posix(), return_config=True, trainer=trainer - ) - prompt_learning_cfg = _patch_prompt_learning_cfg(prompt_learning_cfg, pretrained_cfg, model_path) - model_to_load_path = prompt_learning_model_path - override_config = prompt_learning_cfg - else: - pretrained_cfg = _patch_pretrained_cfg(pretrained_cfg, trainer) - model_to_load_path = model_path - override_config = pretrained_cfg - - module_name, class_name = override_config.target.rsplit(".", 1) - model_class = getattr(importlib.import_module(module_name), class_name) - - # monkeypatch _build_tokenizer method to be process-safe - tokenizer_lock = filelock.FileLock(model_path.parent / f"{model_path.name}.tokenizer.lock") - - def _synced_build_tokenizer(self): - with tokenizer_lock: - self._original_build_tokenizer() - - model_class._original_build_tokenizer = model_class._build_tokenizer - model_class._build_tokenizer = _synced_build_tokenizer - - LOGGER.info("Loading model from %s", model_to_load_path.as_posix()) - LOGGER.debug("Override config: %s", override_config) - - model = model_class.restore_from( - restore_path=model_to_load_path.as_posix(), - trainer=trainer, - override_config_path=override_config, - save_restore_connector=save_restore_connector, - ) - - model.freeze() - model.training = False - try: - # Have to turn off activations_checkpoint_method for inference - model.model.language_model.encoder.activations_checkpoint_method = None - except AttributeError: - pass - return model - - -def setup_distributed_environment(trainer): - def dummy(): - return - - if trainer.strategy.launcher is not None: - trainer.strategy.launcher.launch(dummy, trainer=trainer) - trainer.strategy.setup_environment() - - app_state = AppState() - - hostname = socket.gethostname() - LOGGER.info( - f"global={app_state.global_rank}/{app_state.world_size} " - f"local={app_state.local_rank} @ {hostname}:{app_state.device_id} / " - f"dp={app_state.data_parallel_rank}/{app_state.data_parallel_size} " - f"tp={app_state.tensor_model_parallel_rank}/{app_state.tensor_model_parallel_size} " - f"pp={app_state.pipeline_model_parallel_rank}/{app_state.pipeline_model_parallel_size} " - "vpp=" - f"{getattr(app_state, 'virtual_pipeline_model_parallel_rank', None)}/" - f"{getattr(app_state, 'virtual_pipeline_model_parallel_size', None)}" - ) - return app_state diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/Dockerfile b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/Dockerfile deleted file mode 100644 index f9ddb412a13d5d95f12f6bb85640115194ad05f0..0000000000000000000000000000000000000000 --- 
a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/Dockerfile +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -ARG FROM_IMAGE_NAME=nvcr.io/nvidia/nemo:23.06 -ARG BUILD_FROM - -FROM ${FROM_IMAGE_NAME} as base -WORKDIR /opt/app - -COPY examples/nemo_megatron_gpt_multinode/install.sh /opt/app -RUN /opt/app/install.sh - -# Use when build PyTriton from source -FROM base as install-from-dist -COPY dist/*.whl /opt/app -RUN pip install /opt/app/*.whl - -# Install from pypi -FROM base as install-from-pypi -RUN pip install -U nvidia-pytriton - -FROM install-from-${BUILD_FROM} AS image - -ENV PYTHONUNBUFFERED=1 - -WORKDIR /opt/app - -RUN apt update -y && apt install -y dnsutils - -COPY examples/nemo_megatron_gpt_multinode/client.py /opt/app -COPY examples/nemo_megatron_gpt_multinode/server.py /opt/app -COPY examples/nemo_megatron_gpt_multinode/helpers.py /opt/app -COPY examples/nemo_megatron_gpt_multinode/gpt.py /opt/app -COPY examples/nemo_megatron_gpt_multinode/kubernetes/run.sh /opt/app -COPY examples/nemo_megatron_gpt_multinode/kubernetes/health.sh /opt/app - -ENTRYPOINT [] diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/build_and_push.sh b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/build_and_push.sh deleted file mode 100644 index 78f93b02e62f0930c27cf48c72260227e1868e11..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/build_and_push.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-if [ -z ${DOCKER_IMAGE_NAME_WITH_TAG} ]; then - echo "Provide Docker image name under to push the created image to your registry" - echo "Example:" - echo " export DOCKER_IMAGE_NAME_WITH_TAG=my-registry:5000/nemo-example:latest" - exit 1 -fi - -if [ -z ${FROM_IMAGE_NAME} ]; then - echo "Provide Docker image that would be used as base image" - echo "Example:" - echo " export FROM_IMAGE_NAME=nvcr.io/nvidia/nemo:23.06" - exit 1 -fi - -BUILD_FROM="${BUILD_FROM:-pypi}" -if [[ ${BUILD_FROM} != "pypi" ]] && [[ ${BUILD_FROM} != "dist" ]]; then - echo "The BUILD_FROM variable should be equal to 'pypi' or 'dist'" - echo "Example:" - echo " export BUILD_FROM=dist" - exit 1 -fi - -set -xe - -DOCKER_BUILDKIT=1 docker build -f examples/nemo_megatron_gpt_multinode/kubernetes/Dockerfile \ - -t ${DOCKER_IMAGE_NAME_WITH_TAG} \ - --build-arg FROM_IMAGE_NAME=${FROM_IMAGE_NAME} \ - --build-arg BUILD_FROM=${BUILD_FROM} . -docker push ${DOCKER_IMAGE_NAME_WITH_TAG} diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/health.sh b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/health.sh deleted file mode 100644 index af7f884e1a24e330b20985f54f7b1bcee3cf3a23..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/health.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#!/bin/bash -set -xe - -RANK=${HOSTNAME##*-} - -if [[ "${RANK}" == "0" ]]; -then - # For head node - validate if Triton Server is running - curl --fail localhost:8000/v2/health/live -else - # For workers - validate the process is running - cat /tmp/health -fi \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/Chart.yaml deleted file mode 100644 index b09a62ef4612dbb85a9c2f2c4c93f40a41e7f38e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v2 -description: NeMo Multi Node Example -name: nemo-example -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/headless.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/headless.yaml deleted file mode 100644 index 10b04c1c26db451e37aa1b79c41930805a1ac530..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/headless.yaml +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . 
}} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - selector: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} - clusterIP: None - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics - - port: {{ .Values.communicationPort }} - name: communication - - diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/service.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/service.yaml deleted file mode 100644 index 8db4f59fd20913874ef0cd789bc93dd3b8a6351d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/service.yaml +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . }}-service - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - type: {{ .Values.service.type }} - selector: - statefulset.kubernetes.io/pod-name: {{ template "selector.name" . }}-0 - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/statefulset.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/statefulset.yaml deleted file mode 100644 index 58ecf8940323a5f5dc766bc2a29bfc42d7e2e65c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/statefulset.yaml +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: StatefulSet -metadata: - name: {{ template "selector.fullname" . }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . 
}} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - selector: - matchLabels: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} - serviceName: "{{ template "selector.fullname" . }}" - replicas: {{ mul .Values.replicaCount .Values.statefulset.numOfNodes }} - minReadySeconds: 5 - template: - metadata: - labels: - app: {{ template "selector.name" . }} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - terminationGracePeriodSeconds: 10 - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - restartPolicy: {{ .Values.restartPolicy }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.statefulset.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c"] - args: ["/opt/app/run.sh"] - env: - - name: MODEL_ID - value: {{ .Values.statefulset.modelId }} - - name: PVC_CACHE - value: /mnt/data - - name: NUMBER_OF_NODES - value: "{{ .Values.statefulset.numOfNodes }}" - - name: NUMBER_OF_GPUS - value: "{{ .Values.statefulset.numOfGPUs }}" - - name: POD_NAME - value: {{ template "selector.name" . }} - - name: CLUSTER_NAME - value: {{ .Values.statefulset.clusterName }} - - name: DELAY - value: "15" - - name: PORT - value: "{{ .Values.communicationPort }}" - - name: RDZV_PORT - value: "{{ .Values.rdzvPort }}" - ports: - - containerPort: 8000 - name: http - - containerPort: 8001 - name: grpc - - containerPort: 8002 - name: metrics - - containerPort: {{ .Values.communicationPort }} - name: communication - livenessProbe: - exec: - command: - - bash - - /opt/app/health.sh - initialDelaySeconds: {{ .Values.statefulset.initialDelaySeconds }} - periodSeconds: 10 - resources: - requests: - nvidia.com/gpu: {{ .Values.statefulset.numOfGPUs }} - limits: - nvidia.com/gpu: {{ .Values.statefulset.numOfGPUs }} - volumeMounts: - - name: pvc - mountPath: /mnt/data - - name: shared-memory - mountPath: /dev/shm - volumes: - - name: pvc - persistentVolumeClaim: - claimName: {{ .Values.statefulset.persistentVolumeClaim }} - readOnly: false - - name: shared-memory - emptyDir: - medium: Memory diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/values.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/values.yaml deleted file mode 100644 index 5e3489c39578f5f7046ef2259516e71eb823c8ce..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/values.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-imagePullSecret: null -pullPolicy: Always -restartPolicy: Always -replicaCount: 1 -communicationPort: 12345 -statefulset: - image: null - numOfGPUs: 1 - numOfNodes: 3 - persistentVolumeClaim: llm-cache-pvc - modelId: nvidia/nemo-megatron-gpt-1.3B - clusterName: default.svc.cluster.local - initialDelaySeconds: 180 -service: - type: LoadBalancer diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-csi.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-csi.yaml deleted file mode 100644 index 0788eb431c5b826668ec1f74869636ff7e75cc42..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-csi.yaml +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: llm-cache-pvc -spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 50Gi - storageClassName: csi-hostpath-sc \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-nfs.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-nfs.yaml deleted file mode 100644 index 092d5eb3eb8b8e45ff6766d8c0f30e8106197cfd..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-nfs.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: llm-cache-pvc -spec: - accessModes: - - ReadWriteMany - storageClassName: "" - volumeName: llm-cache - resources: - requests: - storage: 2.5T \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-nfs.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-nfs.yaml deleted file mode 100644 index 8354a38a89ae0ccb18ada05d98f71963e47dbb67..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-nfs.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: PersistentVolume -metadata: - name: llm-cache -spec: - capacity: - storage: 2.5T - accessModes: - - ReadWriteMany - nfs: - path: /llm - server: {IP} diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/run.sh b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/run.sh deleted file mode 100644 index ebe469ebda32c1c2bfa94273977937d3844547f1..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/run.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#!/bin/bash -set -xe - -HEALTH_FILE=/tmp/health - -# Define cleanup method -function cleanup() -{ - rm -f ${HEALTH_FILE} -} - -# Create health check file -touch ${HEALTH_FILE} - -# Clean file on script exit -trap cleanup SIGINT SIGTERM ERR EXIT - -# Initial delay to mark POD as health -sleep ${DELAY} - -# Initialize head node information -if [ -z ${POD_NAME} ]; -then - RANK=0 - ADDRESS=localhost -else - POD_ID=${HOSTNAME##*-} - RANK=$((${POD_ID} % ${NUMBER_OF_NODES})) - HEAD_RANK=$((${POD_ID} / ${NUMBER_OF_NODES} * ${NUMBER_OF_NODES})) - ADDRESS=$(dig +short ${POD_NAME}-${HEAD_RANK}.${POD_NAME}.${CLUSTER_NAME}) -fi - -# Display node info and head address -echo "RANK: ${RANK}" -echo "HEAD ADDRESS: ${ADDRESS}" - -# Append cache flags -if [ -n "${PVC_CACHE}" ]; -then -echo "Initializing cache in shared volume ${PVC_CACHE}" -export TORCH_HOME=${PVC_CACHE}/torch -export HF_HOME=${PVC_CACHE}/hf -fi - -# Use torchrun to initialize distributed computation -torchrun \ - --nproc_per_node=${NUMBER_OF_GPUS} --nnodes=${NUMBER_OF_NODES} --node_rank=${RANK} \ - --max_restarts=0 \ - --master_addr=${ADDRESS} --master_port=${PORT} \ - /opt/app/server.py \ - --gpus ${NUMBER_OF_GPUS} --nodes=${NUMBER_OF_NODES} \ - --model-repo-id ${MODEL_ID} diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/Chart.yaml deleted file mode 100644 index bb14c12fc17f042066b707b2f2fbd52e5823ad21..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: NeMo Single Node Example -name: nemo-example -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/deployment.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/deployment.yaml deleted file mode 100644 index c977cfed231187dc528fd968a674df83e21be194..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/deployment.yaml +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ .Release.Name }} - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - replicas: {{ .Values.replicaCount }} - selector: - matchLabels: - app: {{ template "selector.name" . }} - release: {{ .Release.Name }} - template: - metadata: - labels: - app: {{ template "selector.name" . 
}} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - restartPolicy: {{ .Values.restartPolicy }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.deployment.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c"] - args: ["/opt/app/run.sh"] - env: - - name: MODEL_ID - value: {{ .Values.deployment.modelId }} - - name: NUMBER_OF_NODES - value: "1" - - name: NUMBER_OF_GPUS - value: "{{ .Values.deployment.numOfGPUs }}" - - name: DELAY - value: "0" - - name: PORT - value: "{{ .Values.communicationPort }}" - ports: - - containerPort: 8000 - name: http - - containerPort: 8001 - name: grpc - - containerPort: 8002 - name: metrics - livenessProbe: - initialDelaySeconds: 60 - periodSeconds: 10 - httpGet: - path: /v2/health/live - port: http - readinessProbe: - initialDelaySeconds: {{ .Values.deployment.initialDelaySeconds }} - periodSeconds: 10 - httpGet: - path: /v2/health/ready - port: http - resources: - requests: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - limits: - nvidia.com/gpu: {{ .Values.deployment.numOfGPUs }} - volumeMounts: - - name: shared-memory - mountPath: /dev/shm - volumes: - - name: shared-memory - emptyDir: - medium: Memory diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/service.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/service.yaml deleted file mode 100644 index 79d7c7f62cd8d714edbd6aec8721d7691007d7c8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/service.yaml +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v1 -kind: Service -metadata: - name: {{ template "selector.fullname" . }}-service - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - type: {{ .Values.service.type }} - ports: - - port: 8000 - targetPort: 8000 - name: http - - port: 8001 - targetPort: 8001 - name: grpc - - port: 8002 - targetPort: 8002 - name: metrics - selector: - app: {{ template "selector.name" . 
}} - release: {{ .Release.Name }} diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/values.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/values.yaml deleted file mode 100644 index 90d00086a26193a58b812751084faebfd5c40b70..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/single-node/values.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: Always -replicaCount: 1 -communicationPort: 12345 -deployment: - image: null - numOfGPUs: 1 - modelId: nvidia/nemo-megatron-gpt-1.3B - initialDelaySeconds: 180 -service: - type: LoadBalancer diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/Chart.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/Chart.yaml deleted file mode 100644 index 27b7bd20f0a760ab1c0d44d73c0673728166e211..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/Chart.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: v2 -description: BART PyTriton Example Test -name: nemo-example-test -version: 0.0.1 -appVersion: 0.0.1 diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/templates/_helpers.tpl b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/templates/_helpers.tpl deleted file mode 100644 index 79274ecc63d15d74bdd0b20f9fcf67a6666ee3e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "selector.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "selector.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "selector.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/templates/job.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/templates/job.yaml deleted file mode 100644 index 482013e4b04e7da92e8445fce61501d1eb8f14ab..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/templates/job.yaml +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: batch/v1 -kind: Job -metadata: - name: {{ .Release.Name }}-deployment - namespace: {{ .Release.Namespace }} - labels: - app: {{ template "selector.name" . }} - chart: {{ template "selector.chart" . }} - release: {{ .Release.Name }} - heritage: {{ .Release.Service }} -spec: - template: - metadata: - labels: - app: {{ template "selector.name" . 
}} - release: {{.Release.Name}} - annotations: - date: "{{ now | unixEpoch }}" - spec: - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret }} - {{ end }} - containers: - - name: {{ .Chart.Name }} - image: {{ .Values.image }} - imagePullPolicy: {{ .Values.pullPolicy }} - command: ["bash", "-c"] - args: ["while true; do python /opt/app/client.py --url ${SERVICE_URL}; sleep 1;done;"] - env: - - name: SERVICE_URL - value: {{ .Values.serviceUrl }} - restartPolicy: {{ .Values.restartPolicy }} - backoffLimit: {{ .Values.backoffLimit }} diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/values.yaml b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/values.yaml deleted file mode 100644 index 2d04216c7c722b292e3eaf5724b8573bcd2ca29a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/kubernetes/test/values.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -imagePullSecret: null -pullPolicy: Always -restartPolicy: OnFailure -backoffLimit: 4 -image: null -serviceUrl: "http://nemo-example-service:8000" \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/server.py b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/server.py deleted file mode 100644 index b63ba863a268c46f81e87a38561a5536701dc489..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/server.py +++ /dev/null @@ -1,167 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Text generation server with NeMo Megatron GPT model.""" -import argparse -import logging -from pathlib import Path - -import torch # pytype: disable=import-error -import yaml -from nemo.collections.nlp.modules.common.text_generation_utils import generate # pytype: disable=import-error -from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy # pytype: disable=import-error -from pytorch_lightning.trainer.trainer import Trainer # pytype: disable=import-error - -from pytriton.model_config import ModelConfig -from pytriton.triton import Triton, TritonConfig - -from gpt import NemoGptCallable # pytype: disable=import-error # isort:skip -from helpers import ( # pytype: disable=import-error # isort:skip - download_hf_model, - load_model, - setup_distributed_environment, -) - -if not torch.cuda.is_available(): - raise OSError("GPU is needed for the inference") - -ENDPOINT_BIND_ADDRESS = "0.0.0.0" -HTTP_PORT = 8000 -DEFAULT_LOG_FORMAT = "%(asctime)s - %(levelname)8s - %(process)8d - %(threadName)s - %(name)s: %(message)s" - - -def _resolved_path(path_str): - return Path(path_str).resolve() - - -def main(): - """Main function.""" - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--gpus", - default="-1", - help=( - "Number of GPUs to load model on or exact identifiers of GPUs to use separated by comma. " - "If set to -1 all available GPU will be used." - ), - ) - parser.add_argument( - "--nodes", - default=1, - type=int, - help="Number of nodes to load model on", - ) - model_location = parser.add_mutually_exclusive_group() - model_location.add_argument( - "--model-repo-id", - default="nvidia/nemo-megatron-gpt-1.3B", - help="Model repository id on HuggingFace Hub", - ) - parser.add_argument( - "--model-filename", - help="Path to the model nemo file in HF hub. If not provided first on the list .nemo file will be used.", - ) - model_location.add_argument( - "--model-path", - help="Path to the model nemo file in local file system. This argument has a higher priority " - "than `--model-repo-id`.", - type=_resolved_path, - ) - parser.add_argument("--prompt-model-path", help="Path to the model prompt nemo file") - parser.add_argument( - "--timeout", - default=30, - type=int, - required=False, - help="Process group communication timeout", - ) - parser.add_argument( - "--verbose", - default=False, - action="store_true", - help="Enable verbose logging", - ) - parser.add_argument( - "--triton-config", - type=_resolved_path, - help="A path to YAML config for Triton. You may find allowed fields in `pytriton.triton.TritonConfig`", - ) - parser.add_argument( - "--model-name", - default="GPT", - help="A name of a Megatron model inside Triton.", - ) - parser.add_argument( - "--workspace", - type=_resolved_path, - help="Path to a directory where workspace has to be created (optional)." 
- "If not provided workspace with random name will be created in ~/.cache/pytriton directory.", - ) - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT, force=True) - logger = logging.getLogger("nemo.server") - - logger.info("Initialize trainer:") - logger.info(f" devices: {args.gpus}") - logger.info(f" nodes: {args.nodes}") - - trainer = Trainer( - strategy=NLPDDPStrategy(), - devices=args.gpus, - accelerator="gpu", - num_nodes=args.nodes, - precision=16, - logger=False, - enable_checkpointing=False, - replace_sampler_ddp=False, - ) - if args.model_path is not None: - model = load_model(args.model_path, trainer, prompt_learning_model_path=args.prompt_model_path) - else: - model_path = download_hf_model(args.model_repo_id, args.model_filename) - model = load_model(model_path, trainer, prompt_learning_model_path=args.prompt_model_path) - - app_state = setup_distributed_environment(trainer) - if app_state.global_rank == 0: - infer_callable = NemoGptCallable(model_name=args.model_name, model=model) - if args.triton_config is None: - triton_config = TritonConfig(http_address=ENDPOINT_BIND_ADDRESS, http_port=HTTP_PORT) - else: - with open(args.triton_config) as f: - data = yaml.safe_load(f) - triton_config = TritonConfig.from_dict(data) - with Triton(config=triton_config, workspace=args.workspace) as triton: - triton.bind( - model_name=infer_callable.model_name, - infer_func=infer_callable.infer, - inputs=infer_callable.inputs, - outputs=infer_callable.outputs, - config=ModelConfig(max_batch_size=128), - ) - - triton.serve() - else: - logger.info(f"Running worker with rank {torch.distributed.get_rank()}") - while True: - choice = torch.cuda.LongTensor(1) - torch.distributed.broadcast(choice, 0) - logger.info(f"{choice}") - if choice[0].item() == 0: - generate(model.cuda()) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/train_prompt_learning_model.sh b/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/train_prompt_learning_model.sh deleted file mode 100644 index e777c42ef5bf0ea0040d41501d3ef2a88836ef5e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/nemo_megatron_gpt_multinode/train_prompt_learning_model.sh +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -# following the steps from NeMo prompt learning tutorial notebook -# https://github.com/NVIDIA/NeMo/blob/stable/tutorials/nlp/Multitask_Prompt_and_PTuning.ipynb - -set -xe - -# while using ncr.io/nvidia/nemo docker image -export NEMO_REPO_DIR=/workspace/nemo - -export DATASETS_DIR=${DATASETS_DIR:-$PWD/datasets} -export SENTIMENT_DIR=${DATASETS_DIR}/sentiment -export ASSISTANT_DIR=${DATASETS_DIR}/assistant - -export HF_HOME=${HF_HOME:-$PWD/models} -export REPO_ID=${REPO_ID:-nvidia/nemo-megatron-gpt-1.3B} -export MODEL_FILENAME=${MODEL_FILENAME:-nemo_gpt1.3B_fp16.nemo} - -LANGUAGE_MODEL_PATH="" # will be updated by download_model function - -download_and_preprocess_data() { - # download and preprocess data if not already present - if [ ! -d ${SENTIMENT_DIR} ]; then - mkdir -p ${SENTIMENT_DIR} - wget https://huggingface.co/datasets/financial_phrasebank/resolve/main/data/FinancialPhraseBank-v1.0.zip - unzip FinancialPhraseBank-v1.0.zip -d ${SENTIMENT_DIR} - rm FinancialPhraseBank-v1.0.zip - python3 ${NEMO_REPO_DIR}/scripts/dataset_processing/nlp/financial_phrase_bank/prompt_learning_financial_phrase_bank_preprocessing.py \ - --data-dir ${SENTIMENT_DIR}/FinancialPhraseBank-v1.0 - head -4 ${SENTIMENT_DIR}/FinancialPhraseBank-v1.0/financial_phrase_bank_train.jsonl - fi - - if [ ! -d ${ASSISTANT_DIR} ]; then - mkdir -p ${ASSISTANT_DIR} - wget https://github.com/xliuhw/NLU-Evaluation-Data/archive/master.zip - unzip master.zip -d ${ASSISTANT_DIR} - rm master.zip - python3 ${NEMO_REPO_DIR}/scripts/dataset_processing/nlp/intent_and_slot/prompt_learning_assistant_preprocessing.py \ - --source-dir ${ASSISTANT_DIR}/NLU-Evaluation-Data-master \ - --nemo-format-dir ${ASSISTANT_DIR}/nemo-format \ - --output-dir ${ASSISTANT_DIR} - head -5 ${ASSISTANT_DIR}/assistant_train.jsonl - echo '\n=====\n#Intents: ' $(wc -l < ${ASSISTANT_DIR}/nemo-format/dict.intents.csv) - cat ${ASSISTANT_DIR}/nemo-format/dict.intents.csv - - echo '\n=====\n#Slots: ' $(wc -l < ${ASSISTANT_DIR}/nemo-format/dict.slots.csv) - cat ${ASSISTANT_DIR}/nemo-format/dict.slots.csv - fi -} - -download_model() { - # ensure model downloaded from HF Hub and get path to model - cat >> /tmp/ensure_model.py << EOF -import os -from huggingface_hub import hf_hub_download - -downloaded_model_path = hf_hub_download(repo_id=os.environ["REPO_ID"], filename=os.environ["MODEL_FILENAME"]) -print(downloaded_model_path) -EOF - - LANGUAGE_MODEL_PATH=$(python3 /tmp/ensure_model.py) - echo ${LANGUAGE_MODEL_PATH} - rm /tmp/ensure_model.py -} - -train_model() { - python3 ${NEMO_REPO_DIR}/examples/nlp/language_modeling/megatron_gpt_prompt_learning.py \ - name=sentiment_intent_slot_p_tuning \ - model.global_batch_size=64 \ - model.data.train_ds=["${SENTIMENT_DIR}/FinancialPhraseBank-v1.0/financial_phrase_bank_train.jsonl","${ASSISTANT_DIR}/assistant_train.jsonl"] \ - model.data.validation_ds=["${SENTIMENT_DIR}/FinancialPhraseBank-v1.0/financial_phrase_bank_val.jsonl","${ASSISTANT_DIR}/assistant_val.jsonl"] \ - 'model.task_templates=[{taskname:sentiment,prompt_template:"<|VIRTUAL_PROMPT_0|> {sentence} sentiment:{label}",total_virtual_tokens:10,virtual_token_splits:[10],truncate_field:None,answer_only_loss:true,answer_field:label},{taskname:intent_and_slot,prompt_template:"<|VIRTUAL_PROMPT_0|> Predict intent and slot <|VIRTUAL_PROMPT_1|> :\n{utterance}{label}",total_virtual_tokens:10,virtual_token_splits:[7,3],truncate_field:None,answer_only_loss:false}]' \ - model.existing_tasks=[] \ - model.new_tasks=[sentiment,intent_and_slot] \ - 
model.virtual_prompt_style=p-tuning \ - model.language_model_path=${LANGUAGE_MODEL_PATH} -} - - -# if run in distributed environment -readonly LOCAL_RANK="${LOCAL_RANK:=${SLURM_LOCALID:=${OMPI_COMM_WORLD_LOCAL_RANK:-}}}" -if [ -n "${LOCAL_RANK}" ]; then - if [ "${LOCAL_RANK}" = "0" ]; then - download_and_preprocess_data - download_model - touch /tmp/local_rank0_finished - else - echo ${LOCAL_RANK} waits for the temporary file to appear - while [ ! -f /tmp/local_rank0_finished ]; do - sleep 10 - done - fi - train_model - [ "${LOCAL_RANK}" -eq 0 ] && rm /tmp/local_rank0_finished -else - download_and_preprocess_data - download_model - train_model -fi \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/README.md b/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/README.md deleted file mode 100644 index 26d0f1ee93bbbfecc41cead2210ae7b4f61e1882..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/README.md +++ /dev/null @@ -1,69 +0,0 @@ - - -# Online learning example on MNIST dataset - -## Overview - -The example presents a simple Online Learning concept based on MNIST dataset. -There are two models: inference and training and both are deployed on Triton Inference Server. -Inference model is used for inference requests and training model is used for training. -Inference model is replaced with training model after each training epoch. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies -- `server.py` - start the model with Triton Inference Server -- `client_infer.py` - execute HTTP/gRPC requests to the deployed model -- `client_train.py` - execute HTTP/gRPC requests to the deployed model for training -- `model.py` - model definition - -## Quick Start - -The step-by-step guide: - -1. Install `pytriton` following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client_infer.py` to perform inference requests (with test dataset) on the deployed model -and calculate accuracy. At the beginning accuracy should be around 10% (random predictions). -In the next steps you will run the training, so after a while accuracy should increase. - -```shell -./client_infer.py -``` - -7. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -8. Go to the example directory -9. Run the `client_train.py` to perform training inference with training dataset on the training model -(The script communicates epoch number). The inference model will be replaced with training model after each epoch, -so in the ./client_infer.py terminal you should see the accuracy increasing. - -```shell -./client_train.py -``` diff --git a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/__init__.py b/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/client_infer.py b/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/client_infer.py deleted file mode 100644 index 693652b15e43c58bb7cf6f8c245147bdd6b80ec4..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/client_infer.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for online_learning sample server.""" -import argparse -import logging - -import torch # pytype: disable=import-error -import torch.nn.functional as functional # pytype: disable=import-error -from torchvision import datasets, transforms # pytype: disable=import-error - -from pytriton.client import ModelClient - -LOGGER = logging.getLogger("examples.online_learning_mnist.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -def main(): - global args - parser = argparse.ArgumentParser(description="Inference client") - parser.add_argument("--iter", required=False, default=300, type=int, help="Number of iterations to run") - args = parser.parse_args() - transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]) - dataset2 = datasets.MNIST("../data", train=False, transform=transform, download=True) - test_loader = torch.utils.data.DataLoader(dataset2, batch_size=64) - LOGGER.info("Inference results:") - with ModelClient("localhost", "MnistInfer") as client: - with torch.no_grad(): - for _ in range(args.iter): - test_loss = 0 - correct = 0 - for _batch_idx, (data, target) in enumerate(test_loader): - data_np = data.numpy() - inference_results = client.infer_batch(image=data_np) - prediction_np = inference_results["predictions"] - prediction = torch.from_numpy(prediction_np) - - test_loss += functional.nll_loss(prediction, target, reduction="sum").item() # sum up batch loss - pred = prediction.argmax(dim=1, keepdim=True) # get the index of the max log-probability - correct += pred.eq(target.view_as(pred)).sum().item() - - test_loss /= len(test_loader.dataset) - LOGGER.info( - "\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n".format( - test_loss, correct, len(test_loader.dataset), 100.0 * correct / len(test_loader.dataset) - ) - ) - - -if __name__ == "__main__": - main() diff --git 
a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/client_train.py b/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/client_train.py deleted file mode 100644 index e476bfe40bdd68fee8ec9c981bc7aec1c7f1b1e3..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/client_train.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for online_learning sample server.""" -import logging - -import torch # pytype: disable=import-error -from torchvision import datasets, transforms # pytype: disable=import-error - -from pytriton.client import ModelClient - -LOGGER = logging.getLogger("examples.online_learning_mnist.client_train") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -def main(): - transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]) - train_dataset = datasets.MNIST("../data", train=True, download=True, transform=transform) - train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=64) - epoch = 0 - epoch_size = 134 - with ModelClient("localhost", "MnistTrain") as client: - LOGGER.info("Training:") - for _ in range(2): - for batch_idx, (data, target) in enumerate(train_loader): - if batch_idx % epoch_size == 0: - LOGGER.info(f"Epoch: {epoch}") - epoch += 1 - data = data.numpy() - target = target.numpy() - target = target.reshape((target.shape[0], 1)) - - # In this example, train inference returns the laste training loss in 'results' array - client.infer_batch(image=data, target=target) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/install.sh b/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/install.sh deleted file mode 100644 index 283866c5afffc05f8f43df81552e9bdc7c972870..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/install.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-set -xe - -pip install numpy \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/model.py b/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/model.py deleted file mode 100644 index faf13a498b541dfb715815a664fdb682369b6bfb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/model.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 -# BSD 3-Clause License -# -# Copyright (c) 2017, -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# 1. Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# -# 2. Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# license-headers: type="bsd-3-clause" - -import torch # pytype: disable=import-error -import torch.nn as nn # pytype: disable=import-error -import torch.nn.functional as functional # pytype: disable=import-error - - -# Network structure is based on original PyTorch MNIST example. -class Net(nn.Module): - def __init__(self): - super().__init__() - self.conv1 = nn.Conv2d(1, 32, 3, 1) - self.conv2 = nn.Conv2d(32, 64, 3, 1) - self.dropout1 = nn.Dropout(0.25) - self.dropout2 = nn.Dropout(0.5) - self.fc1 = nn.Linear(9216, 128) - self.fc2 = nn.Linear(128, 10) - - def forward(self, x): - x = self.conv1(x) - x = functional.relu(x) - x = self.conv2(x) - x = functional.relu(x) - x = functional.max_pool2d(x, 2) - x = self.dropout1(x) - x = torch.flatten(x, 1) - x = self.fc1(x) - x = functional.relu(x) - x = self.dropout2(x) - x = self.fc2(x) - output = functional.log_softmax(x, dim=1) - return output diff --git a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/server.py b/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/server.py deleted file mode 100644 index d487fcb9613ed644f196446fa3a1f7052c650ae2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/online_learning_mnist/server.py +++ /dev/null @@ -1,199 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Server shows online learning model concept.""" -import argparse -import logging -import threading -from queue import Queue -from threading import Lock - -import numpy as np -import torch # pytype: disable=import-error -import torch.nn.functional as functional # pytype: disable=import-error -import torch.optim as optim # pytype: disable=import-error -from torch.optim.lr_scheduler import StepLR # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -from model import Net # pytype: disable=import-error # isort:skip - -LOGGER = logging.getLogger("examples.online_learning_mnist.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -class Trainer: - """Trainer class for MNIST model. - It is used to train the model and to keep track of the training progress. - It defines the learning rate scheduler and optimizer. - It organizes the training process in epochs. - """ - - def __init__(self, model, lr, gamma, epoch_size): - self.model = model - self.optimizer = optim.Adadelta(model.parameters(), lr=lr) - self.scheduler = StepLR(self.optimizer, step_size=1, gamma=gamma) - self.iter = 0 - self.epoch = 0 - self.epoch_size = epoch_size - - def train_batch(self, data, target): - self.optimizer.zero_grad() - output = self.model(data) - loss = functional.nll_loss(output, target) - loss.backward() - self.optimizer.step() - self.iter += 1 - return loss.item() - - def ready(self): - return self.iter >= self.epoch_size - - def next_epoch(self): - self.iter = 0 - self.epoch += 1 - self.scheduler.step() - - -class OnlineLearning(threading.Thread): - """Online learning class that implements two infer functions: train and infer. - Infer function is used in inference endpoint and train function is used in training endpoint. - Train function collects data and trains model in background thread. - Infer function uses trained model to make inference. - When trained model is ready, it is swapped with infer model. 
- """ - - def __init__(self, device, lr, gamma, epoch_size, max_queue_size): - super().__init__() - self.device = device - - self.trained_model = Net().to(self.device) - self.trained_model.train() - self.infer_model = Net().to(self.device) - self.infer_model.eval() - self.stopped = False - self.train_data_queue = Queue(maxsize=max_queue_size) - - self.lock = Lock() - self.trainer = Trainer(self.trained_model, lr, gamma, epoch_size) - self.last_loss = 0.0 - - def run(self) -> None: - while not self.stopped: - image, target = self.train_data_queue.get() - if self.stopped: - return - - data_tensor = torch.from_numpy(image).to(self.device) - labels = target.reshape((target.shape[0],)) - labels_tensor = torch.from_numpy(labels).to(self.device) - self.last_loss = self.trainer.train_batch(data_tensor, labels_tensor) - - if self.trainer.ready(): - self.replace_inference_model() - self.trainer.next_epoch() - - def stop(self): - self.stopped = True - self.train_data_queue.put((None, None)) - self.join() - - def replace_inference_model(self): - with self.lock: - self.infer_model.load_state_dict(self.trained_model.state_dict()) - - def train(self, requests): - """Train function is used in training endpoint.""" - # concatenate all requests into one batch. No need for padding due to fixed image dimensions - images = np.concatenate([request["image"] for request in requests], axis=0) - targets = np.concatenate([request["target"] for request in requests], axis=0) - self.train_data_queue.put((images, targets)) - return [{"last_loss": np.array([[self.last_loss]]).astype(np.float32)} for _ in requests] - - @batch - def infer(self, image): - """Infer function is used in inference endpoint.""" - data_tensor = torch.from_numpy(image).to(self.device) - with self.lock: - res = self.infer_model(data_tensor) - res = res.numpy(force=True) - return {"predictions": res} - - -def _parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument( - "--verbose", - "-v", - action="store_true", - help="Enable verbose logging in debug mode.", - ) - return parser.parse_args() - - -def main(): - args = _parse_args() - log_verbose = 1 if args.verbose else 0 - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - online_learning_model = OnlineLearning( - device=torch.device("cuda"), lr=1.0, gamma=0.7, epoch_size=134, max_queue_size=1000 - ) - online_learning_model.start() - try: - with Triton(config=TritonConfig(log_verbose=log_verbose)) as triton: - LOGGER.info("Loading OnlineLearning model") - triton.bind( - model_name="MnistTrain", - infer_func=online_learning_model.train, - inputs=[ - # image for training - Tensor(name="image", dtype=np.float32, shape=(1, 28, 28)), - # target class corresponding to image (class index from 0 to 9) - Tensor(name="target", dtype=np.int64, shape=(1,)), - ], - outputs=[ - # last loss value batch - Tensor(name="last_loss", dtype=np.float32, shape=(1,)), - ], - config=ModelConfig(max_batch_size=64), - strict=True, - ) - triton.bind( - model_name="MnistInfer", - infer_func=online_learning_model.infer, - inputs=[ - # image for classification - Tensor(name="image", dtype=np.float32, shape=(1, 28, 28)), - ], - outputs=[ - # predictions taken from softmax layer - Tensor(name="predictions", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=64), - strict=True, - ) - - LOGGER.info("Serving model") - triton.serve() - finally: - LOGGER.info("Stopping online learning 
model") - online_learning_model.stop() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/README.md b/stf/stf-api-alternative/pytriton/examples/perf_analyzer/README.md deleted file mode 100644 index edc9153963fde02c97560c32105f71fe5849df74..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/README.md +++ /dev/null @@ -1,72 +0,0 @@ - - -# Using Perf Analyzer for profiling HuggingFace BART model - -## Overview - -The example presents profiling of HuggingFace BART model using [Perf -Analyzer](https://github.com/triton-inference-server/client/blob/main/src/c++/perf_analyzer/README.md) - -Example consists of following scripts: - -- `install.sh` - install additional packages and libraries required to run the example -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Requirements - -The example requires the `torch` package. It can be installed in your current environment using pip: - -```shell -pip install torch -``` - -Or you can use NVIDIA PyTorch container: - -```shell -docker run -it --gpus 1 --shm-size 8gb -v {repository_path}:{repository_path} -w {repository_path} nvcr.io/nvidia/pytorch:23.10-py3 bash -``` - -If you select to use container we recommend to install -[NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/overview.html). - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following - the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.sh` to perform queries on model: - -```shell -./client.sh -``` - diff --git a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/__init__.py b/stf/stf-api-alternative/pytriton/examples/perf_analyzer/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/client.sh b/stf/stf-api-alternative/pytriton/examples/perf_analyzer/client.sh deleted file mode 100644 index a6a2e4983924d9686cbca81ad262d502be18c6ca..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/client.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -INPUT_DATA="${THIS_SCRIPT_DIR}/input-data.json" - -perf_analyzer -u 127.0.0.1:8001 \ - -i grpc \ - -m BART \ - --measurement-mode count_windows \ - --measurement-request-count 100 \ - --input-data ${INPUT_DATA} \ - --concurrency-range 4:16:4 \ - -v diff --git a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/input-data.json b/stf/stf-api-alternative/pytriton/examples/perf_analyzer/input-data.json deleted file mode 100644 index 4e9436dfd45fb4c266ea53ed8d74f51578f7e2fc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/input-data.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "data": [ - { - "sequence": { - "content": [ - "one day I will see the world" - ], - "shape": [ - 1 - ] - }, - "labels": { - "content": [ - "travel", - "cooking", - "dancing" - ], - "shape": [ - 3 - ] - } - } - ] -} \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/install.sh b/stf/stf-api-alternative/pytriton/examples/perf_analyzer/install.sh deleted file mode 100644 index 23a3fee19d35a4bf93c23e8812cc298d268d018b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/install.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -echo "Installing libb64-dev required by Perf Analyzer" -apt-get update -apt-get install -y libb64-dev -pip install transformers diff --git a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/server.py b/stf/stf-api-alternative/pytriton/examples/perf_analyzer/server.py deleted file mode 100644 index 17b4c24a17452a170e191512114efac7ec456015..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/perf_analyzer/server.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Simple classifier example based on Hugging Face Pytorch BART model.""" -import logging - -import numpy as np -from transformers import pipeline # pytype: disable=import-error - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.perf_analyzer.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -classifier = pipeline("zero-shot-classification", model="facebook/bart-large-mnli", device=0) - - -@batch -def _infer_fn(**inputs: np.ndarray): - sequences_batch, labels_batch = inputs.values() - - # need to convert dtype=object to bytes first - # end decode unicode bytes - sequences_batch = np.char.decode(sequences_batch.astype("bytes"), "utf-8") - labels_batch = np.char.decode(labels_batch.astype("bytes"), "utf-8") - - scores = [] - for sequence, labels in zip(sequences_batch, labels_batch): - classification_result = classifier(sequence.item(), labels.tolist()) - scores.append(classification_result["scores"]) - scores_batch = np.array(scores, dtype=np.float32) - return {"scores": scores_batch} - - -with Triton() as triton: - logger.info("Loading BART model.") - triton.bind( - model_name="BART", - infer_func=_infer_fn, - inputs=[ - Tensor(name="sequence", dtype=np.bytes_, shape=(1,)), - Tensor(name="labels", dtype=np.bytes_, shape=(-1,)), - ], - outputs=[ - Tensor(name="scores", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=8), - strict=True, - ) - logger.info("Serving inference") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/README.md b/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/README.md deleted file mode 100644 index f5b85f20d5c34744107a9e6c7baa27a971c28b37..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/README.md +++ /dev/null @@ -1,69 +0,0 @@ - - -# Simple python remote mode example models - -## Overview - -The example presents a simple python example of remote mode setup (one model is local and two are remote, setup -from separate python scripts). - - -Example consists of following scripts: - -server_remote_mul.py -server_remote_power.py -server_starting_triton.py - -- `install.sh` - install additional dependencies -- `server_starting_triton.py` - start the model locally in Triton Inference Server -- `server_remote_mul.py` - start the model remotely in Triton Inference Server -- `server_remote_power.py` - start the other model remotely in Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Quick Start - -The step-by-step guide: - -1. Install NVIDIA PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. 
In separate terminals first start triton server using `server_starting_triton.py` and then start -remote models using `server_remote_mul.py` and `server_remote_power.py`: - -```shell -./server_starting_triton.py -``` - -```shell -./server_remote_mul.py -``` - -```shell -./server_remote_power.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. Run the `client.py` to perform queries on model: - -```shell -./client.py -``` diff --git a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/__init__.py b/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/client.py b/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/client.py deleted file mode 100644 index 0c716397719301415650c20e2f6c2a4a5b3e6285..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/client.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Client for simple_python_remote_mode sample server.""" -import logging - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.simple_python_remote_mode.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -batch_size = 2 -a_batch = np.array([[1.0], [2.0]], dtype=np.float32) -b_batch = np.array([[2.0], [3.0]], dtype=np.float32) - -logger.info(f"a: {a_batch.tolist()}") -logger.info(f"b: {b_batch.tolist()}") - -with ModelClient("localhost", "AddSub", model_version="1") as client: - logger.info("Sending inference request") - result_batch = client.infer_batch(a_batch, b_batch) - - for output_name, data_batch in result_batch.items(): - logger.info(f"{output_name}: {data_batch.tolist()}") - -with ModelClient("localhost", "Mul") as client: - logger.info("Sending inference request") - result_batch = client.infer_batch(a_batch, b_batch) - - for output_name, data_batch in result_batch.items(): - logger.info(f"{output_name}: {data_batch.tolist()}") - -with ModelClient("localhost", "Power") as client: - logger.info("Sending inference request") - result_batch = client.infer_batch(a_batch, b_batch) - - for output_name, data_batch in result_batch.items(): - logger.info(f"{output_name}: {data_batch.tolist()}") diff --git a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/install.sh b/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/install.sh deleted file mode 100644 index 7016a27e64d09cd7c26946aab1fd8d5d04bb3b49..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/install.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -pip install numpy \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_remote_mul.py b/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_remote_mul.py deleted file mode 100644 index 15f60066205bb6bd05396827e4df1e89045d5400..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_remote_mul.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Starting PyTriton in remote mode and binding model calculating element wise multiplication.""" -import logging - -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import RemoteTriton - -logger = logging.getLogger("examples.simple_python_remote_mode.server_remote_mul") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -@batch -def _mul(**inputs): - a_batch, b_batch = inputs.values() - dot_result = np.multiply(a_batch, b_batch) - return {"mul": dot_result} - - -with RemoteTriton(url="localhost") as remote_triton: - remote_triton.bind( - model_name="Mul", - infer_func=_mul, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="mul", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - ) - - remote_triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_remote_power.py b/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_remote_power.py deleted file mode 100644 index 26e98cee0d1f62d9af16ca0b611b078ac8cb6a4b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_remote_power.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Starting PyTriton in remote mode and binding model calculating element wise power.""" -import logging - -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import RemoteTriton - -logger = logging.getLogger("examples.simple_python_remote_mode.server_remote_power") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -@batch -def _power(**inputs): - a_batch, b_batch = inputs.values() - dot_result = np.power(a_batch, b_batch) - return {"power": dot_result} - - -with RemoteTriton(url="localhost") as remote_triton: - remote_triton.bind( - model_name="Power", - infer_func=_power, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="power", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - ) - - remote_triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_starting_triton.py b/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_starting_triton.py deleted file mode 100644 index 19562499134b450b6c05975ec9d4af413d7a5144..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/simple_python_remote_mode/server_starting_triton.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Server starting triton with simple python model performing adding and subtract operation.""" -import logging - -import numpy as np - -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton - -logger = logging.getLogger("examples.simple_python_remote_mode.server_starting_triton") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -@batch -def _add_sub(**inputs): - a_batch, b_batch = inputs.values() - add_batch = a_batch + b_batch - sub_batch = a_batch - b_batch - return {"add": add_batch, "sub": sub_batch} - - -with Triton() as triton: - logger.info("Loading and serve AddSub model") - - # triton.bind() is optional here (you can use Triton class for starting server only without binding any model - # and then use RemoteTriton class from separate script to bind model). - triton.bind( - model_name="AddSub", - infer_func=_add_sub, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="add", dtype=np.float32, shape=(-1,)), - Tensor(name="sub", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - ) - logger.info("Blocks script while serving model") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/README.md b/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/README.md deleted file mode 100644 index 5592a739c9806d5ec1169a28705b869ce95ffc25..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/README.md +++ /dev/null @@ -1,53 +0,0 @@ - - -# Use custom parameters and headers - -## Overview - -The example presents a simple Add-Sub model which perform an addition and subtraction operations -on passed input data and scale them using parameters and http headers send to model by client. - -Example consists of following scripts: - -- `install.sh` - install additional dependencies -- `server.py` - start the model with Triton Inference Server -- `client.py` - execute HTTP/gRPC requests to the deployed model - -## Quick Start - -The step-by-step guide: - -1. Install PyTriton following the [installation instruction](../../README.md#installation) -2. Install the additional packages using `install.sh` - -```shell -./install.sh -``` - -3. In current terminal start the model on Triton using `server.py` - -```shell -./server.py -``` - -4. Open new terminal tab (ex. `Ctrl + T` on Ubuntu) or window -5. Go to the example directory -6. 
Run the `client.py` to perform queries on model: - -```shell -./client.py -``` diff --git a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/__init__.py b/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/client.py b/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/client.py deleted file mode 100644 index ea89c57423892c90df1c5b4282c3344835e3bddc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/client.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for use_parameters_and_headers sample server.""" -import logging - -import numpy as np - -from pytriton.client import ModelClient - -logger = logging.getLogger("examples.use_parameters_and_headers.client") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -batch_size = 2 -a_batch = np.ones((batch_size, 1), dtype=np.float32) * 2 -b_batch = np.ones((batch_size, 1), dtype=np.float32) - -logger.info(f"a: {a_batch.tolist()}") -logger.info(f"b: {b_batch.tolist()}") - -with ModelClient("localhost", "ParamsAndHeaders") as client: - logger.info("Sending inference request") - result_batch = client.infer_batch( - a_batch, b_batch, parameters={"parameter_multiplier": 2}, headers={"header_divisor": 3} - ) - -for output_name, data_batch in result_batch.items(): - logger.info(f"{output_name}: {data_batch.tolist()}") diff --git a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/install.sh b/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/install.sh deleted file mode 100644 index 8dcc654c24ed6f3ff9d60b5d6728e1508252a79f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/install.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -pip install numpy \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/server.py b/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/server.py deleted file mode 100644 index 44c9a93729e7d653bb3a243a606bd26d0ddafca8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/examples/use_parameters_and_headers/server.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Server with simple python model performing adding and subtract operation using custom headers and parameters.""" -import logging - -import numpy as np - -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -logger = logging.getLogger("examples.use_parameters_and_headers.server") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -def _infer_with_params_and_headers(requests): - responses = [] - for req in requests: - a_batch, b_batch = req.values() - scaled_add_batch = (a_batch + b_batch) / float(req.parameters["header_divisor"]) - scaled_sub_batch = (a_batch - b_batch) * float(req.parameters["parameter_multiplier"]) - responses.append({"scaled_add": scaled_add_batch, "scaled_sub": scaled_sub_batch}) - return responses - - -with Triton(config=TritonConfig(http_header_forward_pattern="header.*")) as triton: - logger.info("Loading the model using parameters and headers") - triton.bind( - model_name="ParamsAndHeaders", - infer_func=_infer_with_params_and_headers, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(name="scaled_add", dtype=np.float32, shape=(-1,)), - Tensor(name="scaled_sub", dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128), - strict=True, - ) - logger.info("Serving model") - triton.serve() diff --git a/stf/stf-api-alternative/pytriton/mkdocs.yml b/stf/stf-api-alternative/pytriton/mkdocs.yml deleted file mode 100644 index 1e0ef53c0df0052632b80e1ddaec9dab6d9df02e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/mkdocs.yml +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
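For the inputs used by the example client (`a = 2`, `b = 1`, `parameter_multiplier=2` sent as a request parameter, and `header_divisor=3` sent as an HTTP header, which the `http_header_forward_pattern="header.*"` setting forwards into `req.parameters`), the expected results work out as below. This is a local check of the arithmetic only, not captured server output.

```python
import numpy as np

a = np.ones((2, 1), dtype=np.float32) * 2  # a = 2
b = np.ones((2, 1), dtype=np.float32)      # b = 1

scaled_add = (a + b) / 3.0  # divided by header_divisor   -> [[1.0], [1.0]]
scaled_sub = (a - b) * 2.0  # times parameter_multiplier  -> [[2.0], [2.0]]

assert np.allclose(scaled_add, 1.0)
assert np.allclose(scaled_sub, 2.0)
```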
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -site_name: PyTriton -theme: - name: material - custom_dir: docs/overrides - logo: assets/logo.png - favicon: assets/favicon.png - palette: - primary: black - features: - - navigation.top -extra_css: - - assets/styles.css -repo_url: https://github.com/triton-inference-server/pytriton -repo_name: Git Repository -plugins: - - autorefs - - htmlproofer - - mike - - mkdocstrings: - enable_inventory: true - handlers: - python: - import: - - https://docs.python.org/3/objects.inv - options: - docstring_style: google - show_root_heading: true - heading_level: 2 - line_length: 120 - docstring_section_style: table - merge_init_into_class: true - - search -watch: - - pytriton - - docs - - mkdocs.yml - - LICENSE - - README.md - - CHANGELOG.md - - CONTRIBUTING.md -view_uri_template: blob/{ref}/{path} # used by scripts/rewrite_links_to_repo.py hook -hooks: - - scripts/rewrite_links_to_repo.py -extra: - version: - provider: mike -markdown_extensions: - - admonition - - pymdownx.details - - pymdownx.highlight: - anchor_linenums: true - - pymdownx.inlinehilite - - pymdownx.snippets - - pymdownx.superfences -nav: - - Home: README.md - - Installation and Packaging: - - Installation: installation.md - - Building binary package: building.md - - Quick Start: quick_start.md - - Deploying Models: - - Triton Initialization: initialization.md - - Binding Model to Triton: binding_models.md - - Binding Configuration: binding_configuration.md - - Deploying in Clusters: deploying_in_clusters.md - - Triton Remote Mode: remote_triton.md - - Inference Handling: - - Inference Callable: inference_callable.md - - Decorators: decorators.md - - Custom parameters/headers: custom_params.md - - Example with downloaded input data: downloaded_input_data.md - - Chunking and batching: chunking_guide.md - - Clients: clients.md - - Examples: examples.md - - API Reference: api.md - - Changelog: CHANGELOG.md - - Known Issues: known_issues.md - - Contributing: CONTRIBUTING.md - - License: LICENSE.md diff --git a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/PKG-INFO b/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/PKG-INFO deleted file mode 100644 index 80c3c184ea267b9116c3b4dc4745edcbdcb56c50..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/PKG-INFO +++ /dev/null @@ -1,194 +0,0 @@ -Metadata-Version: 2.1 -Name: nvidia-pytriton -Version: 0.4.2 -Summary: PyTriton - Flask/FastAPI-like interface to simplify Triton's deployment in Python environments. 
-License: Apache 2.0 -Project-URL: Documentation, https://triton-inference-server.github.io/pytriton -Project-URL: Source, https://github.com/triton-inference-server/pytriton -Project-URL: Tracker, https://github.com/triton-inference-server/pytriton/issues -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Science/Research -Classifier: Intended Audience :: Developers -Classifier: Topic :: Software Development -Classifier: Topic :: Scientific/Engineering -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Operating System :: Unix -Requires-Python: <4,>=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: numpy~=1.21 -Requires-Dist: protobuf>=3.7.0 -Requires-Dist: pyzmq~=23.0 -Requires-Dist: sh~=1.14 -Requires-Dist: tritonclient[all]~=2.39 -Requires-Dist: typing_inspect~=0.6.0 -Requires-Dist: wrapt>=1.11.0 -Provides-Extra: test -Requires-Dist: pytest~=7.2; extra == "test" -Requires-Dist: pytest-codeblocks~=0.16; extra == "test" -Requires-Dist: pytest-mock~=3.8; extra == "test" -Requires-Dist: pytest-timeout~=2.1; extra == "test" -Requires-Dist: alt-pytest-asyncio~=0.7; extra == "test" -Requires-Dist: pytype!=2021.11.18,!=2022.2.17; extra == "test" -Requires-Dist: pre-commit>=2.20.0; extra == "test" -Requires-Dist: tox>=3.23.1; extra == "test" -Requires-Dist: tqdm>=4.64.1; extra == "test" -Requires-Dist: psutil~=5.1; extra == "test" -Requires-Dist: py-spy~=0.3; extra == "test" -Provides-Extra: doc -Requires-Dist: GitPython>=3.1.30; extra == "doc" -Requires-Dist: mike>=2.0.0; extra == "doc" -Requires-Dist: mkdocs-htmlproofer-plugin>=0.8.0; extra == "doc" -Requires-Dist: mkdocs-material>=8.5.6; extra == "doc" -Requires-Dist: mkdocstrings[python]>=0.19.0; extra == "doc" -Provides-Extra: dev -Requires-Dist: nvidia-pytriton[test]; extra == "dev" -Requires-Dist: nvidia-pytriton[doc]; extra == "dev" -Requires-Dist: black>=22.8; extra == "dev" -Requires-Dist: build<1.0.0,>=0.8; extra == "dev" -Requires-Dist: ipython>=7.16; extra == "dev" -Requires-Dist: isort>=5.10; extra == "dev" -Requires-Dist: pudb>=2022.1.3; extra == "dev" -Requires-Dist: pip>=21.3; extra == "dev" -Requires-Dist: twine>=4.0; extra == "dev" - -.. - Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -PyTriton -========== - -PyTriton is a Flask/FastAPI-like interface that simplifies Triton's deployment in Python environments. -The library allows serving Machine Learning models directly from Python through -NVIDIA's `Triton Inference Server`_. - -.. _Triton Inference Server: https://github.com/triton-inference-server - -In PyTriton, as in Flask or FastAPI, you can define any Python function that executes a machine learning model prediction and exposes -it through an HTTP/gRPC API. 
PyTriton installs Triton Inference Server in your environment and uses it for handling -HTTP/gRPC requests and responses. Our library provides a Python API that allows attaching a Python function to Triton -and a communication layer to send/receive data between Triton and the function. This solution helps utilize the -performance features of Triton Inference Server, such as dynamic batching or response cache, without changing your model -environment. Thus, it improves the performance of running inference on GPU for models implemented in Python. The solution is -framework-agnostic and can be used along with frameworks like PyTorch, TensorFlow, or JAX. - - -Installation --------------- - -The package can be installed from `pypi`_ using: - -.. _pypi: https://pypi.org/project/nvidia-pytriton/ - -.. code-block:: text - - pip install -U nvidia-pytriton - -More details about installation can be found in the `documentation`_. - -.. _documentation: https://triton-inference-server.github.io/pytriton/latest/installation/ - -Example ---------- - -The example presents how to run Python model in Triton Inference Server without need to change the current working -environment. In the example we are using a simple `Linear` PyTorch model. - -The requirement for the example is to have installed PyTorch in your environment. You can do it running: - - -.. code-block:: text - - pip install torch - -In the next step define the `Linear` model: - -.. code-block:: python - - import torch - - model = torch.nn.Linear(2, 3).to("cuda").eval() - -Create a function for handling inference request: - -.. code-block:: python - - import numpy as np - from pytriton.decorators import batch - - - @batch - def infer_fn(**inputs: np.ndarray): - (input1_batch,) = inputs.values() - input1_batch_tensor = torch.from_numpy(input1_batch).to("cuda") - output1_batch_tensor = model(input1_batch_tensor) # Calling the Python model inference - output1_batch = output1_batch_tensor.cpu().detach().numpy() - return [output1_batch] - - -In the next step, create the connection between the model and Triton Inference Server using the bind method: - -.. code-block:: python - - from pytriton.model_config import ModelConfig, Tensor - from pytriton.triton import Triton - - # Connecting inference callback with Triton Inference Server - with Triton() as triton: - # Load model into Triton Inference Server - triton.bind( - model_name="Linear", - infer_func=infer_fn, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128) - ) - -Finally, serve the model with Triton Inference Server: - -.. code-block:: python - - from pytriton.triton import Triton - - with Triton() as triton: - ... # Load models here - triton.serve() - -The `bind` method is creating a connection between Triton Inference Server and the `infer_fn` which handle -the inference queries. The `inputs` and `outputs` describe the model inputs and outputs that are exposed in -Triton. The config field allows more parameters for model deployment. - -The `serve` method is blocking and at this point the application will wait for incoming HTTP/gRPC requests. From that -moment the model is available under name `Linear` in Triton server. The inference queries can be sent to -`localhost:8000/v2/models/Linear/infer` which are passed to the `infer_fn` function. 
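The description above stops at serving the model. A minimal client for that `Linear` model can be sketched with the same `ModelClient` API used by the example clients elsewhere in this package; the `(batch, 2)` input shape follows from the `torch.nn.Linear(2, 3)` definition, while the server address and the `(batch, 3)` output shape are assumptions based on the defaults shown above.

```python
import numpy as np

from pytriton.client import ModelClient

# A batch of two 2-element vectors for the Linear(2, 3) model bound above.
input_batch = np.random.rand(2, 2).astype(np.float32)

with ModelClient("localhost", "Linear") as client:
    result_batch = client.infer_batch(input_batch)
    for output_name, output_batch in result_batch.items():
        print(output_name, output_batch.shape)  # expected: (2, 3)
```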
- -Links -------- - -* Documentation: https://triton-inference-server.github.io/pytriton -* Source: https://github.com/triton-inference-server/pytriton -* Issues: https://github.com/triton-inference-server/pytriton/issues -* Changelog: https://github.com/triton-inference-server/pytriton/blob/main/CHANGELOG.md -* Known Issues: https://github.com/triton-inference-server/pytriton/blob/main/docs/known_issues.md -* Contributing: https://github.com/triton-inference-server/pytriton/blob/main/CONTRIBUTING.md diff --git a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/SOURCES.txt b/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/SOURCES.txt deleted file mode 100644 index 185d22c504a668ea3512eaeac6fd651d0a87109f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/SOURCES.txt +++ /dev/null @@ -1,474 +0,0 @@ -.flake8 -.gitignore -.pre-commit-config.yaml -CHANGELOG.md -CONTRIBUTING.md -COPYRIGHT -LICENSE -Makefile -README.md -mkdocs.yml -pyproject.toml -tox.ini -.github/ISSUE_TEMPLATE/bug_report.md -.github/ISSUE_TEMPLATE/feature_request.md -.github/workflows/stale.yaml -docs/README.md -docs/api.md -docs/binding_configuration.md -docs/binding_models.md -docs/building.md -docs/chunking_guide.md -docs/clients.md -docs/custom_params.md -docs/decorators.md -docs/deploying_in_clusters.md -docs/downloaded_input_data.md -docs/inference_callable.md -docs/initialization.md -docs/installation.md -docs/known_issues.md -docs/pypi.rst -docs/quick_start.md -docs/remote_triton.md -docs/assets/favicon.png -docs/assets/hld.svg -docs/assets/logo.png -docs/assets/styles.css -docs/overrides/partials/copyright.html -examples/README.md -examples/add_sub_notebook/README.md -examples/add_sub_notebook/__init__.py -examples/add_sub_notebook/add_sub.ipynb -examples/add_sub_python/README.md -examples/add_sub_python/__init__.py -examples/add_sub_python/client.py -examples/add_sub_python/install.sh -examples/add_sub_python/server.py -examples/add_sub_python_with_optional/README.md -examples/add_sub_python_with_optional/__init__.py -examples/add_sub_python_with_optional/client.py -examples/add_sub_python_with_optional/install.sh -examples/add_sub_python_with_optional/server.py -examples/dali_resnet101_pytorch/LICENSE -examples/dali_resnet101_pytorch/README.md -examples/dali_resnet101_pytorch/__init__.py -examples/dali_resnet101_pytorch/client.py -examples/dali_resnet101_pytorch/model_inference.py -examples/dali_resnet101_pytorch/server.py -examples/dali_resnet101_pytorch/test_video/orig0.jpg -examples/dali_resnet101_pytorch/test_video/segm0.jpg -examples/dali_resnet101_pytorch/test_video/sintel_trailer_short.mp4 -examples/huggingface_bart_pytorch/README.md -examples/huggingface_bart_pytorch/__init__.py -examples/huggingface_bart_pytorch/client.py -examples/huggingface_bart_pytorch/install.sh -examples/huggingface_bart_pytorch/server.py -examples/huggingface_bart_pytorch/kubernetes/Dockerfile -examples/huggingface_bart_pytorch/kubernetes/build_and_push.sh -examples/huggingface_bart_pytorch/kubernetes/deployment/Chart.yaml -examples/huggingface_bart_pytorch/kubernetes/deployment/values.yaml -examples/huggingface_bart_pytorch/kubernetes/deployment/templates/_helpers.tpl -examples/huggingface_bart_pytorch/kubernetes/deployment/templates/deployment.yaml -examples/huggingface_bart_pytorch/kubernetes/deployment/templates/service.yaml -examples/huggingface_bart_pytorch/kubernetes/test/Chart.yaml -examples/huggingface_bart_pytorch/kubernetes/test/values.yaml 
-examples/huggingface_bart_pytorch/kubernetes/test/templates/_helpers.tpl -examples/huggingface_bart_pytorch/kubernetes/test/templates/job.yaml -examples/huggingface_bert_jax/README.md -examples/huggingface_bert_jax/__init__.py -examples/huggingface_bert_jax/client.py -examples/huggingface_bert_jax/install.sh -examples/huggingface_bert_jax/server.py -examples/huggingface_dialogpt_streaming_pytorch/README.md -examples/huggingface_dialogpt_streaming_pytorch/__init__.py -examples/huggingface_dialogpt_streaming_pytorch/client.py -examples/huggingface_dialogpt_streaming_pytorch/install.sh -examples/huggingface_dialogpt_streaming_pytorch/server.py -examples/huggingface_opt_multinode_jax/Dockerfile -examples/huggingface_opt_multinode_jax/README.md -examples/huggingface_opt_multinode_jax/client.py -examples/huggingface_opt_multinode_jax/install.sh -examples/huggingface_opt_multinode_jax/modeling_flax_opt.py -examples/huggingface_opt_multinode_jax/opt_utils.py -examples/huggingface_opt_multinode_jax/server.py -examples/huggingface_opt_multinode_jax/kubernetes/Dockerfile -examples/huggingface_opt_multinode_jax/kubernetes/build_and_push.sh -examples/huggingface_opt_multinode_jax/kubernetes/health.sh -examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-csi.yaml -examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-claim-nfs.yaml -examples/huggingface_opt_multinode_jax/kubernetes/persistent-volume-nfs.yaml -examples/huggingface_opt_multinode_jax/kubernetes/run.sh -examples/huggingface_opt_multinode_jax/kubernetes/multi-node/Chart.yaml -examples/huggingface_opt_multinode_jax/kubernetes/multi-node/values.yaml -examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/_helpers.tpl -examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/headless.yaml -examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/service.yaml -examples/huggingface_opt_multinode_jax/kubernetes/multi-node/templates/statefulset.yaml -examples/huggingface_opt_multinode_jax/kubernetes/single-node/Chart.yaml -examples/huggingface_opt_multinode_jax/kubernetes/single-node/values.yaml -examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/_helpers.tpl -examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/deployment.yaml -examples/huggingface_opt_multinode_jax/kubernetes/single-node/templates/service.yaml -examples/huggingface_opt_multinode_jax/kubernetes/test/Chart.yaml -examples/huggingface_opt_multinode_jax/kubernetes/test/values.yaml -examples/huggingface_opt_multinode_jax/kubernetes/test/templates/_helpers.tpl -examples/huggingface_opt_multinode_jax/kubernetes/test/templates/job.yaml -examples/huggingface_resnet_pytorch/README.md -examples/huggingface_resnet_pytorch/__init__.py -examples/huggingface_resnet_pytorch/client.py -examples/huggingface_resnet_pytorch/install.sh -examples/huggingface_resnet_pytorch/server.py -examples/huggingface_resnet_pytorch/kubernetes/Dockerfile -examples/huggingface_resnet_pytorch/kubernetes/build_and_push.sh -examples/huggingface_resnet_pytorch/kubernetes/deployment/Chart.yaml -examples/huggingface_resnet_pytorch/kubernetes/deployment/values.yaml -examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/_helpers.tpl -examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/deployment.yaml -examples/huggingface_resnet_pytorch/kubernetes/deployment/templates/service.yaml -examples/huggingface_resnet_pytorch/kubernetes/test/Chart.yaml 
-examples/huggingface_resnet_pytorch/kubernetes/test/values.yaml -examples/huggingface_resnet_pytorch/kubernetes/test/templates/_helpers.tpl -examples/huggingface_resnet_pytorch/kubernetes/test/templates/job.yaml -examples/huggingface_stable_diffusion/README.md -examples/huggingface_stable_diffusion/__init__.py -examples/huggingface_stable_diffusion/client.py -examples/huggingface_stable_diffusion/install.sh -examples/huggingface_stable_diffusion/server.py -examples/huggingface_stable_diffusion/kubernetes/Dockerfile -examples/huggingface_stable_diffusion/kubernetes/build_and_push.sh -examples/huggingface_stable_diffusion/kubernetes/deployment/Chart.yaml -examples/huggingface_stable_diffusion/kubernetes/deployment/values.yaml -examples/huggingface_stable_diffusion/kubernetes/deployment/templates/_helpers.tpl -examples/huggingface_stable_diffusion/kubernetes/deployment/templates/deployment.yaml -examples/huggingface_stable_diffusion/kubernetes/deployment/templates/service.yaml -examples/huggingface_stable_diffusion/kubernetes/test/Chart.yaml -examples/huggingface_stable_diffusion/kubernetes/test/values.yaml -examples/huggingface_stable_diffusion/kubernetes/test/templates/_helpers.tpl -examples/huggingface_stable_diffusion/kubernetes/test/templates/job.yaml -examples/identity_python/README.md -examples/identity_python/__init__.py -examples/identity_python/client.py -examples/identity_python/install.sh -examples/identity_python/server.py -examples/linear_cupy/README.md -examples/linear_cupy/__init__.py -examples/linear_cupy/client.py -examples/linear_cupy/install.sh -examples/linear_cupy/server.py -examples/linear_cupy_notebook/README.md -examples/linear_cupy_notebook/__init__.py -examples/linear_cupy_notebook/linear.ipynb -examples/linear_random_pytorch/README.md -examples/linear_random_pytorch/__init__.py -examples/linear_random_pytorch/client.py -examples/linear_random_pytorch/server.py -examples/mlp_random_tensorflow2/README.md -examples/mlp_random_tensorflow2/__init__.py -examples/mlp_random_tensorflow2/client.py -examples/mlp_random_tensorflow2/server.py -examples/multi_instance_resnet50_pytorch/README.md -examples/multi_instance_resnet50_pytorch/__init__.py -examples/multi_instance_resnet50_pytorch/client.sh -examples/multi_instance_resnet50_pytorch/install.sh -examples/multi_instance_resnet50_pytorch/server.py -examples/multiple_models_python/README.md -examples/multiple_models_python/__init__.py -examples/multiple_models_python/client.py -examples/multiple_models_python/install.sh -examples/multiple_models_python/server.py -examples/nemo_megatron_gpt_multinode/README.md -examples/nemo_megatron_gpt_multinode/__init__.py -examples/nemo_megatron_gpt_multinode/client.py -examples/nemo_megatron_gpt_multinode/gpt.py -examples/nemo_megatron_gpt_multinode/helpers.py -examples/nemo_megatron_gpt_multinode/server.py -examples/nemo_megatron_gpt_multinode/train_prompt_learning_model.sh -examples/nemo_megatron_gpt_multinode/kubernetes/Dockerfile -examples/nemo_megatron_gpt_multinode/kubernetes/build_and_push.sh -examples/nemo_megatron_gpt_multinode/kubernetes/health.sh -examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-csi.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-claim-nfs.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/persistent-volume-nfs.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/run.sh -examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/Chart.yaml 
-examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/values.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/_helpers.tpl -examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/headless.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/service.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/multi-node/templates/statefulset.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/single-node/Chart.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/single-node/values.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/_helpers.tpl -examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/deployment.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/single-node/templates/service.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/test/Chart.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/test/values.yaml -examples/nemo_megatron_gpt_multinode/kubernetes/test/templates/_helpers.tpl -examples/nemo_megatron_gpt_multinode/kubernetes/test/templates/job.yaml -examples/online_learning_mnist/README.md -examples/online_learning_mnist/__init__.py -examples/online_learning_mnist/client_infer.py -examples/online_learning_mnist/client_train.py -examples/online_learning_mnist/install.sh -examples/online_learning_mnist/model.py -examples/online_learning_mnist/server.py -examples/perf_analyzer/README.md -examples/perf_analyzer/__init__.py -examples/perf_analyzer/client.sh -examples/perf_analyzer/input-data.json -examples/perf_analyzer/install.sh -examples/perf_analyzer/server.py -examples/simple_python_remote_mode/README.md -examples/simple_python_remote_mode/__init__.py -examples/simple_python_remote_mode/client.py -examples/simple_python_remote_mode/install.sh -examples/simple_python_remote_mode/server_remote_mul.py -examples/simple_python_remote_mode/server_remote_power.py -examples/simple_python_remote_mode/server_starting_triton.py -examples/use_parameters_and_headers/README.md -examples/use_parameters_and_headers/__init__.py -examples/use_parameters_and_headers/client.py -examples/use_parameters_and_headers/install.sh -examples/use_parameters_and_headers/server.py -nvidia_pytriton.egg-info/PKG-INFO -nvidia_pytriton.egg-info/SOURCES.txt -nvidia_pytriton.egg-info/dependency_links.txt -nvidia_pytriton.egg-info/requires.txt -nvidia_pytriton.egg-info/top_level.txt -pytriton/__init__.py -pytriton/constants.py -pytriton/decorators.py -pytriton/exceptions.py -pytriton/triton.py -pytriton/client/__init__.py -pytriton/client/asyncio_utils.py -pytriton/client/client.py -pytriton/client/exceptions.py -pytriton/client/utils.py -pytriton/client/warnings.py -pytriton/model_config/__init__.py -pytriton/model_config/common.py -pytriton/model_config/generator.py -pytriton/model_config/model_config.py -pytriton/model_config/parser.py -pytriton/model_config/tensor.py -pytriton/model_config/triton_model_config.py -pytriton/models/__init__.py -pytriton/models/manager.py -pytriton/models/model.py -pytriton/proxy/__init__.py -pytriton/proxy/communication.py -pytriton/proxy/inference_handler.py -pytriton/proxy/model.py -pytriton/proxy/types.py -pytriton/proxy/validators.py -pytriton/server/__init__.py -pytriton/server/python_backend_config.py -pytriton/server/triton_server.py -pytriton/server/triton_server_config.py -pytriton/utils/__init__.py -pytriton/utils/dataclasses.py -pytriton/utils/distribution.py -pytriton/utils/endpoint_utils.py -pytriton/utils/logging.py -pytriton/utils/workspace.py 
-scripts/Dockerfile.build -scripts/add_libs_to_wheel.sh -scripts/auditwheel_patched.py -scripts/build_python_stubs.sh -scripts/build_triton.sh -scripts/build_wheel.sh -scripts/extract_triton.sh -scripts/rewrite_links_to_repo.py -tests/__init__.py -tests/utils.py -tests/functional/__init__.py -tests/functional/L0_decoupled_mode/__init__.py -tests/functional/L0_decoupled_mode/test.py -tests/functional/L0_decoupled_mode/test.sh -tests/functional/L0_decoupled_mode/test_pytest.py -tests/functional/L0_example_add_sub_python/__init__.py -tests/functional/L0_example_add_sub_python/test.py -tests/functional/L0_example_add_sub_python/test.sh -tests/functional/L0_example_add_sub_python_with_optionals/__init__.py -tests/functional/L0_example_add_sub_python_with_optionals/test.py -tests/functional/L0_example_add_sub_python_with_optionals/test.sh -tests/functional/L0_example_dali_resnet101_pytorch/__init__.py -tests/functional/L0_example_dali_resnet101_pytorch/test.py -tests/functional/L0_example_dali_resnet101_pytorch/test.sh -tests/functional/L0_example_huggingface_bart_pytorch/__init__.py -tests/functional/L0_example_huggingface_bart_pytorch/test.py -tests/functional/L0_example_huggingface_bart_pytorch/test.sh -tests/functional/L0_example_huggingface_bert_jax/__init__.py -tests/functional/L0_example_huggingface_bert_jax/test.py -tests/functional/L0_example_huggingface_bert_jax/test.sh -tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/__init__.py -tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/test.py -tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/test.sh -tests/functional/L0_example_huggingface_opt_multinode_jax/__init__.py -tests/functional/L0_example_huggingface_opt_multinode_jax/test.py -tests/functional/L0_example_huggingface_opt_multinode_jax/test.sh -tests/functional/L0_example_huggingface_resnet_pytorch/__init__.py -tests/functional/L0_example_huggingface_resnet_pytorch/test.py -tests/functional/L0_example_huggingface_resnet_pytorch/test.sh -tests/functional/L0_example_huggingface_stable_diffusion_pytorch/__init__.py -tests/functional/L0_example_huggingface_stable_diffusion_pytorch/test.py -tests/functional/L0_example_huggingface_stable_diffusion_pytorch/test.sh -tests/functional/L0_example_identity_python/__init__.py -tests/functional/L0_example_identity_python/test.py -tests/functional/L0_example_identity_python/test.sh -tests/functional/L0_example_linear_cupy/__init__.py -tests/functional/L0_example_linear_cupy/test.py -tests/functional/L0_example_linear_cupy/test.sh -tests/functional/L0_example_linear_random_pytorch/__init__.py -tests/functional/L0_example_linear_random_pytorch/test.py -tests/functional/L0_example_linear_random_pytorch/test.sh -tests/functional/L0_example_mlp_random_tensorflow2/__init__.py -tests/functional/L0_example_mlp_random_tensorflow2/test.py -tests/functional/L0_example_mlp_random_tensorflow2/test.sh -tests/functional/L0_example_multi_instance_resnet50_pytorch/__init__.py -tests/functional/L0_example_multi_instance_resnet50_pytorch/test.py -tests/functional/L0_example_multi_instance_resnet50_pytorch/test.sh -tests/functional/L0_example_multiple_models_example/__init__.py -tests/functional/L0_example_multiple_models_example/test.py -tests/functional/L0_example_multiple_models_example/test.sh -tests/functional/L0_example_nemo_megatron_gpt/__init__.py -tests/functional/L0_example_nemo_megatron_gpt/test.py -tests/functional/L0_example_nemo_megatron_gpt/test.sh 
-tests/functional/L0_example_online_learning/__init__.py -tests/functional/L0_example_online_learning/test.py -tests/functional/L0_example_online_learning/test.sh -tests/functional/L0_example_perf_analyzer/__init__.py -tests/functional/L0_example_perf_analyzer/test.py -tests/functional/L0_example_perf_analyzer/test.sh -tests/functional/L0_example_simple_python_remote_mode/__init__.py -tests/functional/L0_example_simple_python_remote_mode/test.py -tests/functional/L0_example_simple_python_remote_mode/test.sh -tests/functional/L0_example_use_parameters_and_headers/__init__.py -tests/functional/L0_example_use_parameters_and_headers/test.py -tests/functional/L0_example_use_parameters_and_headers/test.sh -tests/functional/L0_fatal_error_handling/__init__.py -tests/functional/L0_fatal_error_handling/test.py -tests/functional/L0_fatal_error_handling/test.sh -tests/functional/L0_inference_with_variable_io_size/__init__.py -tests/functional/L0_inference_with_variable_io_size/test.py -tests/functional/L0_inference_with_variable_io_size/test.sh -tests/functional/L0_model_error_handling/__init__.py -tests/functional/L0_model_error_handling/test.py -tests/functional/L0_model_error_handling/test.sh -tests/functional/L0_network_timeouts/__init__.py -tests/functional/L0_network_timeouts/test.py -tests/functional/L0_network_timeouts/test.sh -tests/functional/L0_network_timeouts/test_pytest.py -tests/functional/L0_remote_life_cycle/__init__.py -tests/functional/L0_remote_life_cycle/test.py -tests/functional/L0_remote_life_cycle/test.sh -tests/functional/L0_remote_life_cycle/test_pytest.py -tests/functional/L0_resources_released_after_signals/__init__.py -tests/functional/L0_resources_released_after_signals/server.py -tests/functional/L0_resources_released_after_signals/test.py -tests/functional/L0_resources_released_after_signals/test.sh -tests/functional/L0_response_cache/__init__.py -tests/functional/L0_response_cache/test.py -tests/functional/L0_response_cache/test.sh -tests/functional/L0_run_in_thread/__init__.py -tests/functional/L0_run_in_thread/server.py -tests/functional/L0_run_in_thread/test.py -tests/functional/L0_run_in_thread/test.sh -tests/functional/L0_run_on_python_3-10/__init__.py -tests/functional/L0_run_on_python_3-10/test.py -tests/functional/L0_run_on_python_3-10/test.sh -tests/functional/L0_run_on_python_3-10_pyenv/__init__.py -tests/functional/L0_run_on_python_3-10_pyenv/test.py -tests/functional/L0_run_on_python_3-10_pyenv/test.sh -tests/functional/L0_run_on_python_3-11/__init__.py -tests/functional/L0_run_on_python_3-11/test.py -tests/functional/L0_run_on_python_3-11/test.sh -tests/functional/L0_run_on_python_3-11_pyenv/__init__.py -tests/functional/L0_run_on_python_3-11_pyenv/test.py -tests/functional/L0_run_on_python_3-11_pyenv/test.sh -tests/functional/L0_run_on_python_3-8/__init__.py -tests/functional/L0_run_on_python_3-8/test.py -tests/functional/L0_run_on_python_3-8/test.sh -tests/functional/L0_run_on_python_3-8_pyenv/__init__.py -tests/functional/L0_run_on_python_3-8_pyenv/test.py -tests/functional/L0_run_on_python_3-8_pyenv/test.sh -tests/functional/L0_run_on_python_3-9/__init__.py -tests/functional/L0_run_on_python_3-9/test.py -tests/functional/L0_run_on_python_3-9/test.sh -tests/functional/L0_run_on_python_3-9_pyenv/__init__.py -tests/functional/L0_run_on_python_3-9_pyenv/test.py -tests/functional/L0_run_on_python_3-9_pyenv/test.sh -tests/functional/L0_triton_close_on_parent_death/__init__.py -tests/functional/L0_triton_close_on_parent_death/server.py 
-tests/functional/L0_triton_close_on_parent_death/test.py -tests/functional/L0_triton_close_on_parent_death/test.sh -tests/functional/L0_tritons_cohabitation/__init__.py -tests/functional/L0_tritons_cohabitation/test.py -tests/functional/L0_tritons_cohabitation/test.sh -tests/functional/L0_tritons_cohabitation/test_pytest.py -tests/functional/L0_warning_on_too_verbose_level/__init__.py -tests/functional/L0_warning_on_too_verbose_level/test.py -tests/functional/L0_warning_on_too_verbose_level/test.sh -tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/__init__.py -tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/test.py -tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/test.sh -tests/functional/L1_futures_client_stress/__init__.py -tests/functional/L1_futures_client_stress/test.py -tests/functional/L1_futures_client_stress/test.sh -tests/functional/L1_hf_nlp_distilbert/__init__.py -tests/functional/L1_hf_nlp_distilbert/test.py -tests/functional/L1_hf_nlp_distilbert/test.sh -tests/functional/L1_tfhub_image_detection/__init__.py -tests/functional/L1_tfhub_image_detection/test.py -tests/functional/L1_tfhub_image_detection/test.sh -tests/functional/L2_asyncio_client_long_test/__init__.py -tests/functional/L2_asyncio_client_long_test/test.py -tests/functional/L2_asyncio_client_long_test/test.sh -tests/functional/L2_futures_client_long_test/__init__.py -tests/functional/L2_futures_client_long_test/test.py -tests/functional/L2_futures_client_long_test/test.sh -tests/functional/L2_hf_nlp_distilbert/__init__.py -tests/functional/L2_hf_nlp_distilbert/test.py -tests/functional/L2_hf_nlp_distilbert/test.sh -tests/functional/L2_tfhub_image_detection/__init__.py -tests/functional/L2_tfhub_image_detection/test.py -tests/functional/L2_tfhub_image_detection/test.sh -tests/functional/common/__init__.py -tests/functional/common/datasets.py -tests/functional/common/models.py -tests/functional/common/tests/__init__.py -tests/functional/common/tests/client_stress.py -tests/functional/common/tests/hf_nlp_distilbert.py -tests/functional/common/tests/tfhub_image_detection.py -tests/unit/__init__.py -tests/unit/client_common.py -tests/unit/common.py -tests/unit/test_asyncio_client.py -tests/unit/test_client_utils.py -tests/unit/test_communication_numpy_serialization.py -tests/unit/test_communication_tensor_store.py -tests/unit/test_decorators.py -tests/unit/test_decorators_batch.py -tests/unit/test_futures_client.py -tests/unit/test_init.py -tests/unit/test_model.py -tests/unit/test_model_config_generator.py -tests/unit/test_model_config_parser.py -tests/unit/test_model_proxy_communication.py -tests/unit/test_models_manager.py -tests/unit/test_proxy_inference_handler.py -tests/unit/test_proxy_validators.py -tests/unit/test_sync_client.py -tests/unit/test_tensor.py -tests/unit/test_triton.py -tests/unit/test_triton_config.py -tests/unit/test_triton_server.py -tests/unit/test_utils_dataclasses.py -tests/unit/test_utils_distribution.py -tests/unit/test_workspace.py -tests/unit/utils.py -tests/unit/assets/full_config.pbtxt -tests/unit/assets/invalid_config.pbtxt -tests/unit/assets/valid_config.pbtxt \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/dependency_links.txt b/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/dependency_links.txt deleted file mode 100644 index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000 --- 
a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/requires.txt b/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/requires.txt deleted file mode 100644 index 1dce6ad8709a6b9e72fc376a7f67cbc30acf43cc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/requires.txt +++ /dev/null @@ -1,38 +0,0 @@ -numpy~=1.21 -protobuf>=3.7.0 -pyzmq~=23.0 -sh~=1.14 -tritonclient[all]~=2.39 -typing_inspect~=0.6.0 -wrapt>=1.11.0 - -[dev] -nvidia-pytriton[test] -nvidia-pytriton[doc] -black>=22.8 -build<1.0.0,>=0.8 -ipython>=7.16 -isort>=5.10 -pudb>=2022.1.3 -pip>=21.3 -twine>=4.0 - -[doc] -GitPython>=3.1.30 -mike>=2.0.0 -mkdocs-htmlproofer-plugin>=0.8.0 -mkdocs-material>=8.5.6 -mkdocstrings[python]>=0.19.0 - -[test] -pytest~=7.2 -pytest-codeblocks~=0.16 -pytest-mock~=3.8 -pytest-timeout~=2.1 -alt-pytest-asyncio~=0.7 -pytype!=2021.11.18,!=2022.2.17 -pre-commit>=2.20.0 -tox>=3.23.1 -tqdm>=4.64.1 -psutil~=5.1 -py-spy~=0.3 diff --git a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/top_level.txt b/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/top_level.txt deleted file mode 100644 index 40a3c56ce8a521727c6f06e649f52676aac5eb54..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/nvidia_pytriton.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -pytriton diff --git a/stf/stf-api-alternative/pytriton/pyproject.toml b/stf/stf-api-alternative/pytriton/pyproject.toml deleted file mode 100644 index 96d1c779df460aed7c8cca5d2fac6ac8860366aa..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pyproject.toml +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -[project] -name = "nvidia-pytriton" -readme = "docs/pypi.rst" -description = "PyTriton - Flask/FastAPI-like interface to simplify Triton's deployment in Python environments." 
-dynamic = ["version"] -classifiers = [ - "Development Status :: 3 - Alpha", - "Intended Audience :: Science/Research", - "Intended Audience :: Developers", - "Topic :: Software Development", - "Topic :: Scientific/Engineering", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: Unix", -] -authors = [] -maintainers = [] -keywords = [] -license = {text = "Apache 2.0"} -requires-python = ">=3.8,<4" -dependencies = [ - "numpy ~= 1.21", - "protobuf>=3.7.0", - "pyzmq ~= 23.0", - "sh ~= 1.14", - "tritonclient[all] ~= 2.39", - "typing_inspect ~= 0.6.0", - "wrapt >= 1.11.0", -] - -[project.urls] -"Documentation" = "https://triton-inference-server.github.io/pytriton" -"Source" = "https://github.com/triton-inference-server/pytriton" -"Tracker" = "https://github.com/triton-inference-server/pytriton/issues" - -[project.optional-dependencies] -test = [ - "pytest ~= 7.2", - "pytest-codeblocks ~= 0.16", - "pytest-mock ~= 3.8", - "pytest-timeout ~= 2.1", # timeouts for non-asyncio tests - "alt-pytest-asyncio ~= 0.7", # timeout for asyncio tests - "pytype!=2021.11.18,!=2022.2.17", - "pre-commit >= 2.20.0", - "tox >= 3.23.1", - "tqdm >= 4.64.1", - "psutil ~= 5.1", - "py-spy ~= 0.3", -] -doc = [ - "GitPython >= 3.1.30", - "mike >= 2.0.0", - "mkdocs-htmlproofer-plugin >= 0.8.0", - "mkdocs-material >= 8.5.6", - "mkdocstrings[python] >= 0.19.0", -] -dev = [ - "nvidia-pytriton[test]", - "nvidia-pytriton[doc]", - "black >= 22.8", - "build >= 0.8, <1.0.0", # to support --plat-name for multiarch build - "ipython >= 7.16", - "isort >= 5.10", - "pudb >= 2022.1.3", - "pip >= 21.3", # to support editable installation - "twine >= 4.0", -] - -[build-system] -requires = [ - "setuptools>=65.3.0", - "setuptools_scm[toml]>=6.2", - "wheel>=0.37.1", -] -build-backend = "setuptools.build_meta" - -[tool.setuptools] -include-package-data = true -license-files = ["LICENSE"] - -[tool.setuptools.packages.find] -namespaces = false -include = ["pytriton"] - -[tool.setuptools.package-data] -pytriton = [ - "**/*", -] -[tool.setuptools.exclude-package-data] -pytriton = [ - "tritonserver/external_libs/*", -] - -[tool.setuptools_scm] - -[tool.distutils.bdist_wheel] -plat-name = "linux_x86_64" - - -[tool.black] -line-length = 120 -target-version = ['py38'] -include = '\.pyi?$' -exclude = ''' - -( - /( - \.eggs # exclude a few common directories in the - | \.git # root of the project - | \.hg - | \.mypy_cache - | \.tox - | \.venv - | _build - | buck-out - | build - | dist - )/ -) -''' - -[tool.isort] -profile = "black" -line_length = 120 - -[tool.vulture] -exclude = [] -ignore_decorators = [] -ignore_names = [] -make_whitelist = true -min_confidence = 80 -paths = ["pytriton"] -sort_by_size = true -verbose = false diff --git a/stf/stf-api-alternative/pytriton/pytriton/__init__.py b/stf/stf-api-alternative/pytriton/pytriton/__init__.py deleted file mode 100644 index 5dc88b4dca82662ee682fe49cddcb2dcfc6db3fb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 -from importlib.metadata import PackageNotFoundError, version - -try: - __version__ = version("nvidia-pytriton") -except PackageNotFoundError: - # package is not installed - pass - -from pytriton import client # noqa: F401 -from pytriton import model_config # noqa: F401 -from pytriton import triton # noqa: F401 diff --git a/stf/stf-api-alternative/pytriton/pytriton/client/__init__.py b/stf/stf-api-alternative/pytriton/pytriton/client/__init__.py deleted file mode 100644 index 517a21e6f24f4f0e25b68558b4142c6f8d595a17..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/client/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 - -from .client import AsyncioModelClient # noqa: F401 -from .client import DecoupledModelClient # noqa: F401 -from .client import FuturesModelClient # noqa: F401 -from .client import ModelClient # noqa: F401 diff --git a/stf/stf-api-alternative/pytriton/pytriton/client/asyncio_utils.py b/stf/stf-api-alternative/pytriton/pytriton/client/asyncio_utils.py deleted file mode 100644 index 42d5d55757c4f752ad9101207900c50d57478ce1..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/client/asyncio_utils.py +++ /dev/null @@ -1,307 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Utility module supporting model clients.""" -import asyncio -import logging -import time -from typing import Optional, Union - -import aiohttp -import grpc -import tritonclient.grpc -import tritonclient.http - -from pytriton.client.exceptions import PyTritonClientModelUnavailableError, PyTritonClientTimeoutError -from pytriton.client.utils import LATEST_MODEL_VERSION, ModelState, parse_grpc_response, parse_http_response -from pytriton.model_config.parser import ModelConfigParser - -aio_clients = Union[tritonclient.grpc.aio.InferenceServerClient, tritonclient.http.aio.InferenceServerClient] - -_LOGGER = logging.getLogger(__name__) - -_DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S = 60.0 # 60 seconds -_DEFAULT_ASYNC_SLEEP_FACTOR_S = 0.1 # 10% of timeout - - -async def asyncio_get_model_state( - client: aio_clients, - model_name: str, - model_version: Optional[str] = None, -) -> ModelState: - """Obtains state of the model deployed in Triton Inference Server. - - Typical use: - - >>> import tritonclient.http.aio - ... client = tritonclient.http.aio.InferenceServerClient("localhost:8000") - ... model_state = await get_model_state(client, "MyModel", "1") - - Args: - client: Triton Inference Server client to use for communication - model_name: name of the model which state we're requesting. - model_version: - version of the model which state we're requesting. - If model_version is None state of latest model is returned. - The latest versions of the model are the numerically greatest version numbers. - - Returns: - Model state. ModelState.UNAVAILABLE is returned in case if model with given name and version is not found. - - """ - _LOGGER.debug(f"Obtaining model {model_name} state") - repository_index = await client.get_model_repository_index() - _LOGGER.debug("Model repository index obtained") - if isinstance(repository_index, list): - models_states = parse_http_response(models=repository_index) - else: - models_states = parse_grpc_response(models=repository_index.models) - - if model_version is None: - requested_model_states = { - version: state for (name, version), state in models_states.items() if name == model_name - } - if not requested_model_states: - return ModelState.UNAVAILABLE - else: - requested_model_states = sorted(requested_model_states.items(), key=lambda item: int(item[0])) - latest_version, latest_version_state = requested_model_states[-1] - _LOGGER.debug(f"Model {model_name} latest version: {latest_version} state: {latest_version_state}") - return latest_version_state - else: - key = (model_name, model_version) - if key not in models_states: - return ModelState.UNAVAILABLE - else: - model_state = models_states[key] - _LOGGER.debug(f"Model {model_name} version {model_version} state: {model_state}") - return model_state - - -async def asyncio_get_model_config( - client: aio_clients, - model_name: str, - model_version: Optional[str] = None, - timeout_s: float = _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S, -): - """Obtain configuration of model deployed on the Triton Inference Server. - - Function waits for server readiness. - - Args: - client: Triton Inference Server client to use for communication - model_name: name of the model which configuration we're requesting. - model_version: - version of the model which configuration we're requesting. - If model_version is None configuration of the latest model is returned. - The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to finish model configuration obtain. - - Returns: - Configuration of requested model. 
- - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - """ - should_finish_before = time.time() + timeout_s - _LOGGER.debug(f"Obtaining model {model_name} config (timeout={timeout_s:0.2f})") - try: - _LOGGER.debug(f"Waiting for model {model_name} to be ready") - await asyncio.wait_for( - asyncio_wait_for_model_ready( - client, model_name=model_name, model_version=model_version, timeout_s=timeout_s - ), - timeout_s, - ) - - model_version = model_version or "" - - timeout_s = max(0, should_finish_before - time.time()) - if isinstance(client, tritonclient.grpc.aio.InferenceServerClient): - _LOGGER.debug(f"Obtaining model {model_name} config as_json=True") - response = await asyncio.wait_for( - client.get_model_config(model_name, model_version, as_json=True), timeout_s - ) - model_config = response["config"] - else: - _LOGGER.debug(f"Obtaining model {model_name} config") - model_config = await asyncio.wait_for(client.get_model_config(model_name, model_version), timeout_s) - _LOGGER.debug("Model config obtained") - model_config = ModelConfigParser.from_dict(model_config) - _LOGGER.debug(f"Model config: {model_config}") - return model_config - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {model_name} config (timeout={timeout_s:0.2f})" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - - -async def asyncio_wait_for_server_ready( - asyncio_client: aio_clients, - sleep_time_s: float, -): - """Wait for Triton Inference Server readiness. - - There are two functions, which check server status: - * asyncio_client.is_server_ready() - * asyncio_client.is_server_live() - Both must return true to consider server accessible to read model status. - - Function contains while loop with sleep to check server status periodically. - - Args: - asyncio_client: Triton Inference Server client to use for communication - sleep_time_s: time to sleep between server status checks - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - """ - _LOGGER.debug("Waiting for server to be ready") - try: - while True: - try: - _LOGGER.debug("Waiting for server to be ready") - server_ready = await asyncio_client.is_server_ready() - _LOGGER.debug("Waiting for server to be live") - server_live = await asyncio_client.is_server_live() - except tritonclient.utils.InferenceServerException: - # Raised by tritonclient/grpc/__init__.py:75 - server_live = False - server_ready = False - except aiohttp.client_exceptions.ClientConnectorError: - # This exception is raised by aiohttp/connector.py:901 in _create_direct_connection - # and it is not translated to any other error by tritonclient/http/aio/__init__.py:132 in _get method. - # res = await self._stub.get(url=req_url, - # and tritonclient/http/aio/__init__.py:242 in is_server_ready method. - # response = await self._get(request_uri=request_uri, - server_live = False - server_ready = False - except RuntimeError: - # This exception is raised by aiohttp/client.py:400 in _request - # and it is not translated to any other error by tritonclient/grpc/aio/__init__.py:151: in is_server_ready method. 
- # response = await self._client_stub.ServerReady(request=request, - server_live = False - server_ready = False - except grpc._cython.cygrpc.UsageError: - # This exception is raised by grpcio/grpc/_cython/_cygrpc/aio/channel.pyx.pxi:124 - # and it is not translated to any other error by tritonclient/grpc/aio/__init__.py", line 151, in is_server_ready - # response = await self._client_stub.ServerReady(request=request, - server_live = False - server_ready = False - if server_ready and server_live: - break - _LOGGER.debug(f"Sleeping for {sleep_time_s:0.2f} seconds") - await asyncio.sleep(sleep_time_s) - except asyncio.TimeoutError as e: - # This error is caused by our timeout, not by Triton Inference Server client. - message = "Timeout while waiting for model" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - _LOGGER.debug("Server is ready") - - -async def asyncio_wait_for_model_status_loaded( - asyncio_client: aio_clients, - model_name: str, - sleep_time_s: float, - model_version: Optional[str] = None, -): - """Wait for model status loaded. - - Function runs the following async function to check model status: - ```python - asyncio_get_model_state(asyncio_client, model_name, model_version) - ``` - If it return _ModelState.READY, then another async function can check if model is really ready: - ```python - asyncio_client.is_model_ready(model_name) - ``` - This function uses the above functions to check if model is ready together - with asyncio.wait_for(...) to limit the time of waiting. - - Function contains while loop with sleep to check model status periodically. - - Args: - asyncio_client: Triton Inference Server client to use for communication - model_name: name of the model which configuration we're requesting. - model_version: - version of the model which configuration we're requesting. - If model_version is None configuration of the latest model is returned. - The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to finish model configuration obtain. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - """ - model_version = model_version or "" - model_version_msg = model_version or LATEST_MODEL_VERSION - _LOGGER.debug(f"Waiting for model {model_name}, {model_version_msg} to be ready") - try: - while True: - _LOGGER.debug(f"Checking if model {model_name} is ready") - is_model_ready = await asyncio_client.is_model_ready(model_name, model_version) - if is_model_ready: - break - _LOGGER.debug(f"Sleeping for {sleep_time_s} seconds") - await asyncio.sleep(sleep_time_s) - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {model_name} state (timeout={sleep_time_s:0.2f})" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - _LOGGER.debug(f"Model {model_name}, {model_version_msg} is ready") - - -async def asyncio_wait_for_model_ready( - asyncio_client: aio_clients, - model_name: str, - model_version: Optional[str] = None, - timeout_s: float = _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S, -): - """Wait for Triton Inference Server and deployed on it model readiness. - - Args: - asyncio_client: Triton Inference Server client to use for communication - model_name: name of the model which configuration we're requesting. - model_version: - version of the model which configuration we're requesting. - If model_version is None configuration of the latest model is returned. 
- The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to finish model configuration obtain. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - - """ - _LOGGER.debug(f"Waiting for model {model_name} to be ready (timeout={timeout_s:0.2f})") - sleep_time_s = timeout_s * _DEFAULT_ASYNC_SLEEP_FACTOR_S - try: - should_finish_before = time.time() + timeout_s - await asyncio.wait_for(asyncio_wait_for_server_ready(asyncio_client, sleep_time_s), timeout_s) - _LOGGER.debug(f"Waiting for model {model_name} to be ready") - timeout_s = max(0, should_finish_before - time.time()) - await asyncio.wait_for( - asyncio_wait_for_model_status_loaded( - asyncio_client, model_name=model_name, model_version=model_version, sleep_time_s=sleep_time_s - ), - timeout_s, - ) - except PyTritonClientModelUnavailableError as e: - _LOGGER.error(f"Failed to obtain model {model_name} config error {e}") - raise e - except asyncio.TimeoutError as e: - _LOGGER.error(f"Failed to obtain model {model_name} config error {e}") - raise PyTritonClientTimeoutError( - f"Timeout while waiting for model {model_name} to be ready (timeout={timeout_s:0.2f})" - ) from e - _LOGGER.debug(f"Model {model_name} is ready") diff --git a/stf/stf-api-alternative/pytriton/pytriton/client/client.py b/stf/stf-api-alternative/pytriton/pytriton/client/client.py deleted file mode 100644 index 1adf710282501d2ec1081f67a3c7c5167bd94b3e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/client/client.py +++ /dev/null @@ -1,1709 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Clients for easy interaction with models deployed on the Triton Inference Server. - -Typical usage example: - -```python -client = ModelClient("localhost", "MyModel") -result_dict = client.infer_sample(input_a=a, input_b=b) -client.close() -``` - -Inference inputs can be provided either as positional or keyword arguments: - -```python -result_dict = client.infer_sample(input1, input2) -result_dict = client.infer_sample(a=input1, b=input2) -``` - -Mixing of argument passing conventions is not supported and will raise PyTritonClientValueError. 
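The helpers above share one time budget across two sequential waits: first the server-ready loop, then the model-ready loop with whatever time remains. A simplified sketch of that shared-deadline pattern, where `first_wait` and `second_wait` stand in for the real coroutines:

```python
import asyncio
import time

async def wait_with_shared_deadline(first_wait, second_wait, timeout_s: float):
    # Time spent in the first phase is subtracted from the budget of the second,
    # mirroring asyncio_wait_for_model_ready above.
    deadline = time.time() + timeout_s
    await asyncio.wait_for(first_wait(), timeout_s)
    remaining = max(0.0, deadline - time.time())
    await asyncio.wait_for(second_wait(), remaining)

asyncio.run(
    wait_with_shared_deadline(lambda: asyncio.sleep(0.1), lambda: asyncio.sleep(0.1), timeout_s=1.0)
)
```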
-""" - -import asyncio -import contextlib -import itertools -import logging -import socket -import time -import warnings -from concurrent.futures import Future -from queue import Empty, Full, Queue -from threading import Lock, Thread -from typing import Dict, Optional, Tuple, Union - -import gevent -import numpy as np -import tritonclient.grpc -import tritonclient.grpc.aio -import tritonclient.http -import tritonclient.http.aio -import tritonclient.utils - -from pytriton.client.asyncio_utils import asyncio_get_model_config, asyncio_wait_for_model_ready -from pytriton.client.exceptions import ( - PyTritonClientClosedError, - PyTritonClientInferenceServerError, - PyTritonClientModelDoesntSupportBatchingError, - PyTritonClientQueueFullError, - PyTritonClientTimeoutError, - PyTritonClientValueError, -) -from pytriton.client.utils import ( - _DEFAULT_NETWORK_TIMEOUT_S, - _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S, - TritonUrl, - get_model_config, - wait_for_model_ready, - wait_for_server_ready, -) -from pytriton.client.warnings import NotSupportedTimeoutWarning -from pytriton.model_config.triton_model_config import TritonModelConfig - -_LOGGER = logging.getLogger(__name__) - -_DEFAULT_SYNC_INIT_TIMEOUT_S = _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S -_DEFAULT_FUTURES_INIT_TIMEOUT_S = _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S -DEFAULT_INFERENCE_TIMEOUT_S = 60.0 - - -_IOType = Union[Tuple[np.ndarray, ...], Dict[str, np.ndarray]] - - -def _verify_inputs_args(inputs, named_inputs): - if not inputs and not named_inputs: - raise PyTritonClientValueError("Provide input data") - if not bool(inputs) ^ bool(named_inputs): - raise PyTritonClientValueError("Use either positional either keyword method arguments convention") - - -def _verify_parameters(parameters_or_headers: Optional[Dict[str, Union[str, int, bool]]] = None): - if parameters_or_headers is None: - return - if not isinstance(parameters_or_headers, dict): - raise PyTritonClientValueError("Parameters and headers must be a dictionary") - for key, value in parameters_or_headers.items(): - if not isinstance(key, str): - raise PyTritonClientValueError("Parameter/header key must be a string") - if not isinstance(value, (str, int, bool)): - raise PyTritonClientValueError("Parameter/header value must be a string, integer or boolean") - - -class BaseModelClient: - """Base client for model deployed on the Triton Inference Server.""" - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - lazy_init: bool = True, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - model_config: Optional[TritonModelConfig] = None, - ensure_model_is_ready: bool = True, - ): - """Inits BaseModelClient for given model deployed on the Triton Inference Server. - - Common usage: - - ```python - client = ModelClient("localhost", "BERT") - result_dict = client.infer_sample(input1_sample, input2_sample) - client.close() - ``` - - Args: - url: The Triton Inference Server url, e.g. `grpc://localhost:8001`. - In case no scheme is provided http scheme will be used as default. - In case no port is provided default port for given scheme will be used - - 8001 for grpc scheme, 8000 for http scheme. - model_name: name of the model to interact with. - model_version: version of the model to interact with. - If model_version is None inference on latest model will be performed. - The latest versions of the model are numerically the greatest version numbers. 
- lazy_init: if initialization should be performed just before sending first request to inference server. - init_timeout_s: timeout in seconds for the server and model to be ready. If not passed, the default timeout of 300 seconds will be used. - inference_timeout_s: timeout in seconds for a single model inference request. If not passed, the default timeout of 60 seconds will be used. - model_config: model configuration. If not passed, it will be read from inference server during initialization. - ensure_model_is_ready: if model should be checked if it is ready before first inference request. - - Raises: - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientTimeoutError: - if `lazy_init` argument is False and wait time for server and model being ready exceeds `init_timeout_s`. - PyTritonClientInvalidUrlError: If provided Triton Inference Server url is invalid. - """ - self._init_timeout_s = _DEFAULT_SYNC_INIT_TIMEOUT_S if init_timeout_s is None else init_timeout_s - self._inference_timeout_s = DEFAULT_INFERENCE_TIMEOUT_S if inference_timeout_s is None else inference_timeout_s - self._network_timeout_s = min(_DEFAULT_NETWORK_TIMEOUT_S, self._init_timeout_s) - - self._general_client = self.create_client_from_url(url, network_timeout_s=self._network_timeout_s) - self._infer_client = self.create_client_from_url(url, network_timeout_s=self._inference_timeout_s) - - self._model_name = model_name - self._model_version = model_version - - self._request_id_generator = itertools.count(0) - - # Monkey patch __del__ method from client to catch error in client when instance is garbage collected. - # This is needed because we are closing client in __exit__ method or in close method. - # (InferenceClient uses gevent library which does not support closing twice from different threads) - self._monkey_patch_client() - - if model_config is not None: - self._model_config = model_config - self._model_ready = None if ensure_model_is_ready else True - - else: - self._model_config = None - self._model_ready = None - self._lazy_init: bool = lazy_init - - self._handle_lazy_init() - - @classmethod - def from_existing_client(cls, existing_client: "BaseModelClient"): - """Create a new instance from an existing client using the same class. - - Common usage: - ```python - client = BaseModelClient.from_existing_client(existing_client) - ``` - - Args: - existing_client: An instance of an already initialized subclass. - - Returns: - A new instance of the same subclass with shared configuration and readiness state. - """ - kwargs = {} - # Copy model configuration and readiness state if present - if hasattr(existing_client, "_model_config"): - kwargs["model_config"] = existing_client._model_config - kwargs["ensure_model_is_ready"] = False - - new_client = cls( - url=existing_client._url, - model_name=existing_client._model_name, - model_version=existing_client._model_version, - init_timeout_s=existing_client._init_timeout_s, - inference_timeout_s=existing_client._inference_timeout_s, - **kwargs, - ) - - return new_client - - def create_client_from_url(self, url: str, network_timeout_s: Optional[float] = None): - """Create Triton Inference Server client. - - Args: - url: url of the server to connect to. - If url doesn't contain scheme (e.g. "localhost:8001") http scheme is added. - If url doesn't contain port (e.g. "localhost") default port for given scheme is added. - network_timeout_s: timeout for client commands. Default value is 60.0 s. 
- - Returns: - Triton Inference Server client. - - Raises: - PyTritonClientInvalidUrlError: If provided Triton Inference Server url is invalid. - """ - self._triton_url = TritonUrl.from_url(url) - self._url = self._triton_url.without_scheme - self._triton_client_lib = self.get_lib() - self._monkey_patch_client() - - if self._triton_url.scheme == "grpc": - # by default grpc client has very large number of timeout, thus we want to make it equal to http client timeout - network_timeout_s = _DEFAULT_NETWORK_TIMEOUT_S if network_timeout_s is None else network_timeout_s - warnings.warn( - f"tritonclient.grpc doesn't support timeout for other commands than infer. Ignoring network_timeout: {network_timeout_s}.", - NotSupportedTimeoutWarning, - stacklevel=1, - ) - - triton_client_init_kwargs = self._get_init_extra_args() - - _LOGGER.debug( - f"Creating InferenceServerClient for {self._triton_url.with_scheme} with {triton_client_init_kwargs}" - ) - return self._triton_client_lib.InferenceServerClient(self._url, **triton_client_init_kwargs) - - def get_lib(self): - """Returns tritonclient library for given scheme.""" - raise NotImplementedError - - @property - def _next_request_id(self) -> str: - # pytype complained about creating generator in __init__ method - # so we create it lazily - if getattr(self, "_request_id_generator", None) is None: - self._request_id_generator = itertools.count(0) - return str(next(self._request_id_generator)) - - def _get_init_extra_args(self): - timeout = self._inference_timeout_s # pytype: disable=attribute-error - # The inference timeout is used for both the HTTP and the GRPC protocols. However, - # the way the timeout is passed to the client differs depending on the protocol. - # For the HTTP protocol, the timeout is set in the ``__init__`` method as ``network_timeout`` - # and ``connection_timeout``. For the GRPC protocol, the timeout - # is passed to the infer method as ``client_timeout``. - # Both protocols support timeouts correctly and will raise an exception - # if the network request or the inference process takes longer than the timeout. - # This is a design choice of the underlying tritonclient library. - - if self._triton_url.scheme != "http": - return {} - - kwargs = { - # This value sets the maximum time allowed for each network request in both model loading and inference process - "network_timeout": timeout, - # This value sets the maximum time allowed for establishing a connection to the server. - # We use the inference timeout here instead of the init timeout because the init timeout - # is meant for waiting for the model to be ready. The connection timeout should be shorter - # than the init timeout because it only checks if connection is established (e.g. 
correct port) - "connection_timeout": timeout, - } - return kwargs - - def _monkey_patch_client(self): - pass - - def _get_model_config_extra_args(self): - # For the GRPC protocol, the timeout must be passed to the each request as client_timeout - # model_config doesn't yet support timeout but it is planned for the future - # grpc_network_timeout_s will be used for model_config - return {} - - def _handle_lazy_init(self): - raise NotImplementedError - - -class ModelClient(BaseModelClient): - """Synchronous client for model deployed on the Triton Inference Server.""" - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - lazy_init: bool = True, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - model_config: Optional[TritonModelConfig] = None, - ensure_model_is_ready: bool = True, - ): - """Inits ModelClient for given model deployed on the Triton Inference Server. - - If `lazy_init` argument is False, model configuration will be read - from inference server during initialization. - - Common usage: - - ```python - client = ModelClient("localhost", "BERT") - result_dict = client.infer_sample(input1_sample, input2_sample) - client.close() - ``` - - Client supports also context manager protocol: - - ```python - with ModelClient("localhost", "BERT") as client: - result_dict = client.infer_sample(input1_sample, input2_sample) - ``` - - The creation of client requires connection to the server and downloading model configuration. You can create client from existing client using the same class: - - ```python - client = ModelClient.from_existing_client(existing_client) - ``` - - Args: - url: The Triton Inference Server url, e.g. 'grpc://localhost:8001'. - In case no scheme is provided http scheme will be used as default. - In case no port is provided default port for given scheme will be used - - 8001 for grpc scheme, 8000 for http scheme. - model_name: name of the model to interact with. - model_version: version of the model to interact with. - If model_version is None inference on latest model will be performed. - The latest versions of the model are numerically the greatest version numbers. - lazy_init: if initialization should be performed just before sending first request to inference server. - init_timeout_s: timeout for maximum waiting time in loop, which sends retry requests ask if model is ready. It is applied at initialization time only when `lazy_init` argument is False. Default is to do retry loop at first inference. - inference_timeout_s: timeout in seconds for the model inference process. - If non passed default 60 seconds timeout will be used. - For HTTP client it is not only inference timeout but any client request timeout - - get model config, is model loaded. For GRPC client it is only inference timeout. - model_config: model configuration. If not passed, it will be read from inference server during initialization. - ensure_model_is_ready: if model should be checked if it is ready before first inference request. - - Raises: - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientTimeoutError: - if `lazy_init` argument is False and wait time for server and model being ready exceeds `init_timeout_s`. - PyTritonClientUrlParseError: In case of problems with parsing url. 
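A small sketch of the scheme-dependent timeout handling described above: the HTTP client receives its timeouts as constructor arguments, while the GRPC client gets a `client_timeout` on each `infer` call instead. The helper below only mirrors that branching and is not part of the original file:

```python
def init_extra_kwargs(scheme: str, inference_timeout_s: float) -> dict:
    # GRPC: nothing at construction time; the timeout travels with each infer() call.
    if scheme != "http":
        return {}
    # HTTP: both the per-request and the connection timeout are set up front.
    return {
        "network_timeout": inference_timeout_s,
        "connection_timeout": inference_timeout_s,
    }

print(init_extra_kwargs("http", 60.0))   # {'network_timeout': 60.0, 'connection_timeout': 60.0}
print(init_extra_kwargs("grpc", 60.0))   # {}
```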
- """ - super().__init__( - url=url, - model_name=model_name, - model_version=model_version, - lazy_init=lazy_init, - init_timeout_s=init_timeout_s, - inference_timeout_s=inference_timeout_s, - model_config=model_config, - ensure_model_is_ready=ensure_model_is_ready, - ) - - def get_lib(self): - """Returns tritonclient library for given scheme.""" - return {"grpc": tritonclient.grpc, "http": tritonclient.http}[self._triton_url.scheme.lower()] - - def __enter__(self): - """Create context for using ModelClient as a context manager.""" - return self - - def __exit__(self, *_): - """Close resources used by ModelClient instance when exiting from the context.""" - self.close() - - def load_model(self, config: Optional[str] = None, files: Optional[dict] = None): - """Load model on the Triton Inference Server. - - Args: - config: str - Optional JSON representation of a model config provided for - the load request, if provided, this config will be used for - loading the model. - files: dict - Optional dictionary specifying file path (with "file:" prefix) in - the override model directory to the file content as bytes. - The files will form the model directory that the model will be - loaded from. If specified, 'config' must be provided to be - the model configuration of the override model directory. - """ - self._general_client.load_model(self._model_name, config=config, files=files) - - def unload_model(self): - """Unload model from the Triton Inference Server.""" - self._general_client.unload_model(self._model_name) - - def close(self): - """Close resources used by ModelClient. - - This method closes the resources used by the ModelClient instance, - including the Triton Inference Server connections. - Once this method is called, the ModelClient instance should not be used again. - """ - _LOGGER.debug("Closing ModelClient") - try: - if self._general_client is not None: - self._general_client.close() - if self._infer_client is not None: - self._infer_client.close() - self._general_client = None - self._infer_client = None - except Exception as e: - _LOGGER.error(f"Error while closing ModelClient resources: {e}") - raise e - - def wait_for_model(self, timeout_s: float): - """Wait for the Triton Inference Server and the deployed model to be ready. - - Args: - timeout_s: timeout in seconds to wait for the server and model to be ready. - - Raises: - PyTritonClientTimeoutError: If the server and model are not ready before the given timeout. - PyTritonClientModelUnavailableError: If the model with the given name (and version) is unavailable. - KeyboardInterrupt: If the hosting process receives SIGINT. - PyTritonClientClosedError: If the ModelClient is closed. - """ - if self._general_client is None: - raise PyTritonClientClosedError("ModelClient is closed") - wait_for_model_ready(self._general_client, self._model_name, self._model_version, timeout_s=timeout_s) - - @property - def is_batching_supported(self): - """Checks if model supports batching. - - Also waits for server to get into readiness state. - """ - return self.model_config.max_batch_size > 0 - - def wait_for_server(self, timeout_s: float): - """Wait for Triton Inference Server readiness. - - Args: - timeout_s: timeout to server get into readiness state. - - Raises: - PyTritonClientTimeoutError: If server is not in readiness state before given timeout. 
- KeyboardInterrupt: If hosting process receives SIGINT - """ - wait_for_server_ready(self._general_client, timeout_s=timeout_s) - - @property - def model_config(self) -> TritonModelConfig: - """Obtain the configuration of the model deployed on the Triton Inference Server. - - This method waits for the server to get into readiness state before obtaining the model configuration. - - Returns: - TritonModelConfig: configuration of the model deployed on the Triton Inference Server. - - Raises: - PyTritonClientTimeoutError: If the server and model are not in readiness state before the given timeout. - PyTritonClientModelUnavailableError: If the model with the given name (and version) is unavailable. - KeyboardInterrupt: If the hosting process receives SIGINT. - PyTritonClientClosedError: If the ModelClient is closed. - """ - if not self._model_config: - if self._general_client is None: - raise PyTritonClientClosedError("ModelClient is closed") - - self._model_config = get_model_config( - self._general_client, self._model_name, self._model_version, timeout_s=self._init_timeout_s - ) - return self._model_config - - def infer_sample( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Dict[str, np.ndarray]: - """Run synchronous inference on a single data sample. - - Typical usage: - - ```python - client = ModelClient("localhost", "MyModel") - result_dict = client.infer_sample(input1, input2) - client.close() - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - result_dict = client.infer_sample(input1, input2) - result_dict = client.infer_sample(a=input1, b=input2) - ``` - - Args: - *inputs: Inference inputs provided as positional arguments. - parameters: Custom inference parameters. - headers: Custom inference headers. - **named_inputs: Inference inputs provided as named arguments. - - Returns: - Dictionary with inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: If mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: If the wait time for the server and model being ready exceeds `init_timeout_s` or - inference request time exceeds `inference_timeout_s`. - PyTritonClientModelUnavailableError: If the model with the given name (and version) is unavailable. - PyTritonClientInferenceServerError: If an error occurred on the inference callable or Triton Inference Server side. - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - if self.is_batching_supported: - if inputs: - inputs = tuple(data[np.newaxis, ...] for data in inputs) - elif named_inputs: - named_inputs = {name: data[np.newaxis, ...] for name, data in named_inputs.items()} - - result = self._infer(inputs or named_inputs, parameters, headers) - - return self._debatch_result(result) - - def infer_batch( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Dict[str, np.ndarray]: - """Run synchronous inference on batched data. 
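For models that support batching, `infer_sample` above wraps every input in a batch of one before sending it and strips the batch axis from every output afterwards. A toy illustration of that round trip, with a fake response standing in for the server call:

```python
import numpy as np

sample = np.array([1.0, 2.0, 3.0], dtype=np.float32)
batched = sample[np.newaxis, ...]                        # shape (1, 3) is what the server sees
fake_outputs = {"scores": batched * 2}                   # stand-in for the inference response
debatched = {name: data[0] for name, data in fake_outputs.items()}
assert debatched["scores"].shape == (3,)
```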
- - Typical usage: - - ```python - client = ModelClient("localhost", "MyModel") - result_dict = client.infer_batch(input1, input2) - client.close() - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - result_dict = client.infer_batch(input1, input2) - result_dict = client.infer_batch(a=input1, b=input2) - ``` - - Args: - *inputs: Inference inputs provided as positional arguments. - parameters: Custom inference parameters. - headers: Custom inference headers. - **named_inputs: Inference inputs provided as named arguments. - - Returns: - Dictionary with inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: If mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: If the wait time for the server and model being ready exceeds `init_timeout_s` or - inference request time exceeds `inference_timeout_s`. - PyTritonClientModelUnavailableError: If the model with the given name (and version) is unavailable. - PyTritonClientInferenceServerError: If an error occurred on the inference callable or Triton Inference Server side. - PyTritonClientModelDoesntSupportBatchingError: If the model doesn't support batching. - PyTritonClientValueError: if mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: - in case of first method call, `lazy_init` argument is False - and wait time for server and model being ready exceeds `init_timeout_s` or - inference time exceeds `inference_timeout_s` passed to `__init__`. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientInferenceServerError: If error occurred on inference callable or Triton Inference Server side, - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - if not self.is_batching_supported: - raise PyTritonClientModelDoesntSupportBatchingError( - f"Model {self.model_config.model_name} doesn't support batching - use infer_sample method instead" - ) - - return self._infer(inputs or named_inputs, parameters, headers) - - def _wait_and_init_model_config(self, init_timeout_s: float): - if self._general_client is None: - raise PyTritonClientClosedError("ModelClient is closed") - - should_finish_before_s = time.time() + init_timeout_s - self.wait_for_model(init_timeout_s) - self._model_ready = True - timeout_s = max(0.0, should_finish_before_s - time.time()) - self._model_config = get_model_config( - self._general_client, self._model_name, self._model_version, timeout_s=timeout_s - ) - - def _create_request(self, inputs: _IOType): - if self._infer_client is None: - raise PyTritonClientClosedError("ModelClient is closed") - - if not self._model_ready: - self._wait_and_init_model_config(self._init_timeout_s) - - if isinstance(inputs, Tuple): - inputs = {input_spec.name: input_data for input_spec, input_data in zip(self.model_config.inputs, inputs)} - - inputs_wrapped = [] - - for input_name, input_data in inputs.items(): - if input_data.dtype == object and not isinstance(input_data.reshape(-1)[0], bytes): - raise RuntimeError( - f"Numpy array for {input_name!r} input with dtype=object should contain encoded strings \ - \\(e.g. into utf-8\\). Element type: {type(input_data.reshape(-1)[0])}" - ) - if input_data.dtype.type == np.str_: - raise RuntimeError( - "Unicode inputs are not supported. " - f"Encode numpy array for {input_name!r} input (ex. with np.char.encode(array, 'utf-8'))." 
- ) - triton_dtype = tritonclient.utils.np_to_triton_dtype(input_data.dtype) - infer_input = self._triton_client_lib.InferInput(input_name, input_data.shape, triton_dtype) - infer_input.set_data_from_numpy(input_data) - inputs_wrapped.append(infer_input) - - outputs_wrapped = [ - self._triton_client_lib.InferRequestedOutput(output_spec.name) for output_spec in self.model_config.outputs - ] - return inputs_wrapped, outputs_wrapped - - def _infer(self, inputs: _IOType, parameters, headers) -> Dict[str, np.ndarray]: - if self.model_config.decoupled: - raise PyTritonClientInferenceServerError("Model config is decoupled. Use DecoupledModelClient instead.") - - inputs_wrapped, outputs_wrapped = self._create_request(inputs) - - try: - _LOGGER.debug("Sending inference request to Triton Inference Server") - response = self._infer_client.infer( - model_name=self._model_name, - model_version=self._model_version or "", - inputs=inputs_wrapped, - headers=headers, - outputs=outputs_wrapped, - request_id=self._next_request_id, - parameters=parameters, - **self._get_infer_extra_args(), - ) - except tritonclient.utils.InferenceServerException as e: - # tritonclient.grpc raises execption with message containing "Deadline Exceeded" for timeout - if "Deadline Exceeded" in e.message(): - raise PyTritonClientTimeoutError( - f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s. Message: {e.message()}" - ) from e - - raise PyTritonClientInferenceServerError( - f"Error occurred during inference request. Message: {e.message()}" - ) from e - except socket.timeout as e: # tritonclient.http raises socket.timeout for timeout - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s Message: {e}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - except OSError as e: # tritonclient.http raises socket.error for connection error - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s Message: {e}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - - if isinstance(response, tritonclient.http.InferResult): - outputs = { - output["name"]: response.as_numpy(output["name"]) for output in response.get_response()["outputs"] - } - else: - outputs = {output.name: response.as_numpy(output.name) for output in response.get_response().outputs} - - return outputs - - def _get_numpy_result(self, result): - if isinstance(result, tritonclient.grpc.InferResult): - result = {output.name: result.as_numpy(output.name) for output in result.get_response().outputs} - else: - result = {output["name"]: result.as_numpy(output["name"]) for output in result.get_response()["outputs"]} - return result - - def _debatch_result(self, result): - if self.is_batching_supported: - result = {name: data[0] for name, data in result.items()} - return result - - def _handle_lazy_init(self): - if not self._lazy_init: - self._wait_and_init_model_config(self._init_timeout_s) - - def _get_infer_extra_args(self): - if self._triton_url.scheme == "http": - return {} - # For the GRPC protocol, the timeout is passed to the infer method as client_timeout - # This timeout applies to the whole inference process and each network request - - # The ``infer`` supports also timeout argument for both GRPC and HTTP. - # It is applied at server side and supported only for dynamic batching. 
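The input validation above rejects unicode arrays and object arrays that do not hold encoded bytes; a short sketch of preparing a text input the way the error messages suggest:

```python
import numpy as np

text = np.array(["a", "bb", "ccc"])        # unicode dtype ('<U3') would be rejected
encoded = np.char.encode(text, "utf-8")    # bytes dtype ('S3') passes the check
assert isinstance(encoded.astype(object).reshape(-1)[0], bytes)
```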
- # However, it is not used here yet and planned for future release - kwargs = {"client_timeout": self._inference_timeout_s} - return kwargs - - -class DecoupledModelClient(ModelClient): - """Synchronous client for decoupled model deployed on the Triton Inference Server.""" - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - lazy_init: bool = True, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - model_config: Optional[TritonModelConfig] = None, - ensure_model_is_ready: bool = True, - ): - """Inits DecoupledModelClient for given decoupled model deployed on the Triton Inference Server. - - Common usage: - - ```python - client = DecoupledModelClient("localhost", "BERT") - for response in client.infer_sample(input1_sample, input2_sample): - print(response) - client.close() - ``` - - Args: - url: The Triton Inference Server url, e.g. `grpc://localhost:8001`. - In case no scheme is provided http scheme will be used as default. - In case no port is provided default port for given scheme will be used - - 8001 for grpc scheme, 8000 for http scheme. - model_name: name of the model to interact with. - model_version: version of the model to interact with. - If model_version is None inference on latest model will be performed. - The latest versions of the model are numerically the greatest version numbers. - lazy_init: if initialization should be performed just before sending first request to inference server. - init_timeout_s: timeout in seconds for the server and model to be ready. If not passed, the default timeout of 300 seconds will be used. - inference_timeout_s: timeout in seconds for a single model inference request. If not passed, the default timeout of 60 seconds will be used. - model_config: model configuration. If not passed, it will be read from inference server during initialization. - ensure_model_is_ready: if model should be checked if it is ready before first inference request. - - Raises: - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientTimeoutError: - if `lazy_init` argument is False and wait time for server and model being ready exceeds `init_timeout_s`. - PyTritonClientInvalidUrlError: If provided Triton Inference Server url is invalid. - """ - super().__init__( - url, - model_name, - model_version, - lazy_init=lazy_init, - init_timeout_s=init_timeout_s, - inference_timeout_s=inference_timeout_s, - model_config=model_config, - ensure_model_is_ready=ensure_model_is_ready, - ) - if self._triton_url.scheme == "http": - raise PyTritonClientValueError("DecoupledModelClient is only supported for grpc protocol") - self._queue = Queue() - self._lock = Lock() - - def close(self): - """Close resources used by DecoupledModelClient.""" - _LOGGER.debug("Closing DecoupledModelClient") - if self._lock.acquire(blocking=False): - try: - super().close() - finally: - self._lock.release() - else: - _LOGGER.warning("DecoupledModelClient is stil streaming answers") - self._infer_client.stop_stream(False) - super().close() - - def _infer(self, inputs: _IOType, parameters, headers): - if not self._lock.acquire(blocking=False): - raise PyTritonClientInferenceServerError("Inference is already in progress") - if not self.model_config.decoupled: - raise PyTritonClientInferenceServerError("Model config is coupled. 
Use ModelClient instead.") - - inputs_wrapped, outputs_wrapped = self._create_request(inputs) - if parameters is not None: - raise PyTritonClientValueError("DecoupledModelClient does not support parameters") - if headers is not None: - raise PyTritonClientValueError("DecoupledModelClient does not support headers") - try: - _LOGGER.debug("Sending inference request to Triton Inference Server") - if self._infer_client._stream is None: - self._infer_client.start_stream(callback=lambda result, error: self._response_callback(result, error)) - - self._infer_client.async_stream_infer( - model_name=self._model_name, - model_version=self._model_version or "", - inputs=inputs_wrapped, - outputs=outputs_wrapped, - request_id=self._next_request_id, - enable_empty_final_response=True, - **self._get_infer_extra_args(), - ) - except tritonclient.utils.InferenceServerException as e: - # tritonclient.grpc raises execption with message containing "Deadline Exceeded" for timeout - if "Deadline Exceeded" in e.message(): - raise PyTritonClientTimeoutError( - f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s. Message: {e.message()}" - ) from e - - raise PyTritonClientInferenceServerError( - f"Error occurred during inference request. Message: {e.message()}" - ) from e - except socket.timeout as e: # tritonclient.http raises socket.timeout for timeout - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s Message: {e}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - except OSError as e: # tritonclient.http raises socket.error for connection error - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s Message: {e}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - _LOGGER.debug("Returning response iterator") - return self._create_response_iterator() - - def _response_callback(self, response, error): - _LOGGER.debug(f"Received response from Triton Inference Server: {response}") - if error: - _LOGGER.error(f"Error occurred during inference request. Message: {error}") - self._queue.put(error) - else: - actual_response = response.get_response() - # Check if the object is not None - triton_final_response = actual_response.parameters.get("triton_final_response") - if triton_final_response and triton_final_response.bool_param: - self._queue.put(None) - else: - result = self._get_numpy_result(response) - self._queue.put(result) - - def _create_response_iterator(self): - try: - while True: - try: - item = self._queue.get(self._inference_timeout_s) - except Empty as e: - message = f"Timeout occurred during inference request. Timeout: {self._inference_timeout_s} s" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - if isinstance(item, Exception): - message = f"Error occurred during inference request. 
Message: {item.message()}" - _LOGGER.error(message) - raise PyTritonClientInferenceServerError(message) from item - - if item is None: - break - yield item - finally: - self._lock.release() - - def _debatch_result(self, result): - if self.is_batching_supported: - result = ({name: data[0] for name, data in result_.items()} for result_ in result) - return result - - def _get_infer_extra_args(self): - # kwargs = super()._get_infer_extra_args() - kwargs = {} - # kwargs["enable_empty_final_response"] = True - return kwargs - - -class AsyncioModelClient(BaseModelClient): - """Asyncio client for model deployed on the Triton Inference Server. - - This client is based on Triton Inference Server Python clients and GRPC library: - - ``tritonclient.http.aio.InferenceServerClient`` - - ``tritonclient.grpc.aio.InferenceServerClient`` - - It can wait for server to be ready with model loaded and then perform inference on it. - ``AsyncioModelClient`` supports asyncio context manager protocol. - - Typical usage: - - ```python - from pytriton.client import AsyncioModelClient - import numpy as np - - input1_sample = np.random.rand(1, 3, 224, 224).astype(np.float32) - input2_sample = np.random.rand(1, 3, 224, 224).astype(np.float32) - - client = AsyncioModelClient("localhost", "MyModel") - result_dict = await client.infer_sample(input1_sample, input2_sample) - print(result_dict["output_name"]) - await client.close() - ``` - """ - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - lazy_init: bool = True, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - model_config: Optional[TritonModelConfig] = None, - ensure_model_is_ready: bool = True, - ): - """Inits ModelClient for given model deployed on the Triton Inference Server. - - If `lazy_init` argument is False, model configuration will be read - from inference server during initialization. - - Args: - url: The Triton Inference Server url, e.g. 'grpc://localhost:8001'. - In case no scheme is provided http scheme will be used as default. - In case no port is provided default port for given scheme will be used - - 8001 for grpc scheme, 8000 for http scheme. - model_name: name of the model to interact with. - model_version: version of the model to interact with. - If model_version is None inference on latest model will be performed. - The latest versions of the model are numerically the greatest version numbers. - lazy_init: if initialization should be performed just before sending first request to inference server. - init_timeout_s: timeout for server and model being ready. - model_config: model configuration. If not passed, it will be read from inference server during initialization. - ensure_model_is_ready: if model should be checked if it is ready before first inference request. - - Raises: - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientTimeoutError: if `lazy_init` argument is False and wait time for server and model being ready exceeds `init_timeout_s`. - PyTritonClientUrlParseError: In case of problems with parsing url. 
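A self-contained sketch of the streaming flow used by `DecoupledModelClient` above: the response callback pushes partial results (or an exception) into a queue, `None` marks the final response, and a generator drains the queue. The sample data here is illustrative:

```python
from queue import Empty, Queue

def response_iterator(results: Queue, timeout_s: float):
    while True:
        try:
            item = results.get(timeout=timeout_s)
        except Empty:
            raise TimeoutError("no partial response within timeout") from None
        if isinstance(item, Exception):
            raise item
        if item is None:                   # final-response marker
            return
        yield item

stream = Queue()
for partial in ({"out": 1}, {"out": 2}, None):
    stream.put(partial)
print(list(response_iterator(stream, timeout_s=1.0)))   # [{'out': 1}, {'out': 2}]
```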
- """ - super().__init__( - url=url, - model_name=model_name, - model_version=model_version, - lazy_init=lazy_init, - init_timeout_s=init_timeout_s, - inference_timeout_s=inference_timeout_s, - model_config=model_config, - ensure_model_is_ready=ensure_model_is_ready, - ) - - def get_lib(self): - """Get Triton Inference Server Python client library.""" - return {"grpc": tritonclient.grpc.aio, "http": tritonclient.http.aio}[self._triton_url.scheme.lower()] - - async def __aenter__(self): - """Create context for use AsyncioModelClient as a context manager.""" - _LOGGER.debug("Entering AsyncioModelClient context") - try: - if not self._lazy_init: - _LOGGER.debug("Waiting in AsyncioModelClient context for model to be ready") - await self._wait_and_init_model_config(self._init_timeout_s) - _LOGGER.debug("Model is ready in AsyncioModelClient context") - return self - except Exception as e: - _LOGGER.error("Error occurred during AsyncioModelClient context initialization") - await self.close() - raise e - - async def __aexit__(self, *_): - """Close resources used by AsyncioModelClient when exiting from context.""" - await self.close() - _LOGGER.debug("Exiting AsyncioModelClient context") - - async def close(self): - """Close resources used by _ModelClientBase.""" - _LOGGER.debug("Closing InferenceServerClient") - await self._general_client.close() - await self._infer_client.close() - _LOGGER.debug("InferenceServerClient closed") - - async def wait_for_model(self, timeout_s: float): - """Asynchronous wait for Triton Inference Server and deployed on it model readiness. - - Args: - timeout_s: timeout to server and model get into readiness state. - - Raises: - PyTritonClientTimeoutError: If server and model are not in readiness state before given timeout. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - KeyboardInterrupt: If hosting process receives SIGINT - """ - _LOGGER.debug(f"Waiting for model {self._model_name} to be ready") - try: - await asyncio.wait_for( - asyncio_wait_for_model_ready( - self._general_client, self._model_name, self._model_version, timeout_s=timeout_s - ), - self._init_timeout_s, - ) - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {self._model_name} to be ready for {self._init_timeout_s}s" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - - @property - async def model_config(self): - """Obtain configuration of model deployed on the Triton Inference Server. - - Also waits for server to get into readiness state. - """ - try: - if not self._model_config: - kwargs = self._get_model_config_extra_args() - _LOGGER.debug(f"Obtaining model config for {self._model_name}") - - self._model_config = await asyncio.wait_for( - asyncio_get_model_config( - self._general_client, - self._model_name, - self._model_version, - timeout_s=self._init_timeout_s, - **kwargs, - ), - self._init_timeout_s, - ) - _LOGGER.debug(f"Obtained model config for {self._model_name}") - return self._model_config - except asyncio.TimeoutError as e: - message = f"Timeout while waiting for model {self._model_name} to be ready for {self._init_timeout_s}s" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - - async def infer_sample( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Dict[str, np.ndarray]: - """Run asynchronous inference on single data sample. 
- - Typical usage: - - ```python - client = AsyncioModelClient("localhost", "MyModel") - result_dict = await client.infer_sample(input1, input2) - await client.close() - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - result_dict = await client.infer_sample(input1, input2) - result_dict = await client.infer_sample(a=input1, b=input2) - ``` - - Mixing of argument passing conventions is not supported and will raise PyTritonClientRuntimeError. - - Args: - *inputs: inference inputs provided as positional arguments. - parameters: custom inference parameters. - headers: custom inference headers. - **named_inputs: inference inputs provided as named arguments. - - Returns: - dictionary with inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: if mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: - in case of first method call, `lazy_init` argument is False - and wait time for server and model being ready exceeds `init_timeout_s` - or inference time exceeds `timeout_s`. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientInferenceServerError: If error occurred on inference callable or Triton Inference Server side. - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - _LOGGER.debug(f"Running inference for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - - model_supports_batching = model_config.max_batch_size > 0 - if model_supports_batching: - if inputs: - inputs = tuple(data[np.newaxis, ...] for data in inputs) - elif named_inputs: - named_inputs = {name: data[np.newaxis, ...] for name, data in named_inputs.items()} - - _LOGGER.debug(f"Running _infer for {self._model_name}") - result = await self._infer(inputs or named_inputs, parameters, headers) - _LOGGER.debug(f"_infer for {self._model_name} finished") - if model_supports_batching: - result = {name: data[0] for name, data in result.items()} - - return result - - async def infer_batch( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Dict[str, np.ndarray]: - """Run asynchronous inference on batched data. - - Typical usage: - - ```python - client = AsyncioModelClient("localhost", "MyModel") - result_dict = await client.infer_batch(input1, input2) - await client.close() - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - result_dict = await client.infer_batch(input1, input2) - result_dict = await client.infer_batch(a=input1, b=input2) - ``` - - Mixing of argument passing conventions is not supported and will raise PyTritonClientValueError. - - Args: - *inputs: inference inputs provided as positional arguments. - parameters: custom inference parameters. - headers: custom inference headers. - **named_inputs: inference inputs provided as named arguments. - - Returns: - dictionary with inference results, where dictionary keys are output names. - - Raises: - PyTritonClientValueError: if mixing of positional and named arguments passing detected. - PyTritonClientTimeoutError: - in case of first method call, `lazy_init` argument is False - and wait time for server and model being ready exceeds `init_timeout_s` - or inference time exceeds `timeout_s`. 
- PyTritonClientModelDoesntSupportBatchingError: if model doesn't support batching. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - PyTritonClientInferenceServerError: If error occurred on inference callable or Triton Inference Server side. - """ - _verify_inputs_args(inputs, named_inputs) - _verify_parameters(parameters) - _verify_parameters(headers) - - _LOGGER.debug(f"Running inference for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - - model_supports_batching = model_config.max_batch_size > 0 - if not model_supports_batching: - _LOGGER.error(f"Model {model_config.model_name} doesn't support batching") - raise PyTritonClientModelDoesntSupportBatchingError( - f"Model {model_config.model_name} doesn't support batching - use infer_sample method instead" - ) - - _LOGGER.debug(f"Running _infer for {self._model_name}") - result = await self._infer(inputs or named_inputs, parameters, headers) - _LOGGER.debug(f"_infer for {self._model_name} finished") - return result - - async def _wait_and_init_model_config(self, init_timeout_s: float): - """Asynchronous wait for model and obtain model configuration. - - Args: - init_timeout_s: timeout for server and model being ready. - - Raises: - PyTritonClientTimeoutError: if wait time for server and model being ready exceeds `init_timeout_s` - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - """ - try: - should_finish_before_s = time.time() + init_timeout_s - _LOGGER.debug(f"Waiting for model {self._model_name} to be ready") - - await asyncio.wait_for(self.wait_for_model(init_timeout_s), init_timeout_s) - _LOGGER.debug(f"Model {self._model_name} is ready") - self._model_ready = True - - timeout_s = max(0.0, should_finish_before_s - time.time()) - _LOGGER.debug(f"Obtaining model config for {self._model_name}") - self._model_config = await asyncio.wait_for( - asyncio_get_model_config( - self._general_client, self._model_name, self._model_version, timeout_s=timeout_s - ), - timeout_s, - ) - _LOGGER.debug(f"Model config for {self._model_name} obtained") - except asyncio.TimeoutError as e: - _LOGGER.error(f"Timeout exceeded while waiting for model {self._model_name} to be ready") - raise PyTritonClientTimeoutError( - f"Timeout exceeded while waiting for model {self._model_name} to be ready" - ) from e - - def _validate_input(self, input_name, input_data): - if input_data.dtype == object and not isinstance(input_data.reshape(-1)[0], bytes): - raise RuntimeError( - f"Numpy array for {input_name!r} input with dtype=object should contain encoded strings \ - \\(e.g. into utf-8\\). Element type: {type(input_data.reshape(-1)[0])}" - ) - if input_data.dtype.type == np.str_: - raise RuntimeError( - "Unicode inputs are not supported. " - f"Encode numpy array for {input_name!r} input (ex. with np.char.encode(array, 'utf-8'))." 
- ) - - async def _execute_infer(self, model_config, inputs_wrapped, outputs_wrapped, parameters, headers): - try: - _LOGGER.debug(f"Sending InferRequest for {self._model_name}") - kwargs = self._get_infer_extra_args() - response = await self._infer_client.infer( - model_name=self._model_name, - model_version=self._model_version or "", - inputs=inputs_wrapped, - headers=headers, - outputs=outputs_wrapped, - request_id=self._next_request_id, - parameters=parameters, - **kwargs, - ) - except asyncio.exceptions.TimeoutError as e: - # HTTP aio client raises asyncio.exceptions.TimeoutError for timeout errors - message = f"Timeout exceeded while running inference for {self._model_name}" - _LOGGER.error(message) - raise PyTritonClientTimeoutError(message) from e - except tritonclient.utils.InferenceServerException as e: - message = f"Error occurred on Triton Inference Server side:\n {e.message()}" - _LOGGER.error(message) - if "Deadline Exceeded" in e.message(): - # GRPC aio client raises InferenceServerException with message "Deadline Exceeded" - # for timeout errors - raise PyTritonClientTimeoutError(message) from e - else: - raise PyTritonClientInferenceServerError(message) from e - _LOGGER.debug(f"Received InferResponse for {self._model_name}") - outputs = {output_spec.name: response.as_numpy(output_spec.name) for output_spec in model_config.outputs} - return outputs - - async def _infer(self, inputs: _IOType, parameters, headers) -> Dict[str, np.ndarray]: - if self._model_ready: - _LOGGER.debug(f"Waiting for model {self._model_name} config") - await self._wait_and_init_model_config(self._init_timeout_s) - _LOGGER.debug(f"Model wait finished for {self._model_name}") - - _LOGGER.debug(f"Obtaining config for {self._model_name}") - model_config = await self.model_config - _LOGGER.debug(f"Model config for {self._model_name} obtained") - - if isinstance(inputs, Tuple): - inputs = {input_spec.name: input_data for input_spec, input_data in zip(model_config.inputs, inputs)} - - inputs_wrapped = [] - for input_name, input_data in inputs.items(): - if isinstance(input_data, np.ndarray): - self._validate_input(input_name, input_data) - triton_dtype = tritonclient.utils.np_to_triton_dtype(input_data.dtype) - infer_input = self._triton_client_lib.InferInput(input_name, input_data.shape, triton_dtype) - infer_input.set_data_from_numpy(input_data) - input_wrapped = infer_input - inputs_wrapped.append(input_wrapped) - else: - raise PyTritonClientValueError( - f"Input {input_name} is not a numpy array. Got {type(input_data)} instead." - ) - - outputs_wrapped = [ - self._triton_client_lib.InferRequestedOutput(output_spec.name) for output_spec in model_config.outputs - ] - return await self._execute_infer(model_config, inputs_wrapped, outputs_wrapped, parameters, headers) - - def _handle_lazy_init(self): - # Asynchronous lazy initialization is done in __aenter__ method - pass - - def _get_init_extra_args(self): - # The inference timeout is used for both the HTTP and the GRPC protocols. However, - # the way the timeout is passed to the client differs depending on the protocol. - # For the HTTP protocol, the timeout is set in the ``__init__`` method as ``conn_timeout`` for both connection and request timeouts. - # For the GRPC protocol, the timeout - # is passed to the infer method as ``client_timeout``. - # Both protocols support timeouts correctly and will raise an exception - # if the network request or the inference process takes longer than the timeout. 
- # This is a design choice of the underlying tritonclient library. - - if self._triton_url.scheme != "http": - return {} - - kwargs = { - # This value sets the maximum time allowed for both connection and network requests in both model loading and inference process - "conn_timeout": self._inference_timeout_s, - } - return kwargs - - def _get_infer_extra_args(self): - if self._triton_url.scheme == "http": - return {} - # For the GRPC protocol, the timeout is passed to the infer method as client_timeout - # This timeout applies to the whole inference process and each network request - - # The ``infer`` supports also timeout argument for both GRPC and HTTP. - # It is applied at server side and supported only for dynamic batching. - # However, it is not used here yet and planned for future release - kwargs = {"client_timeout": self._inference_timeout_s} - return kwargs - - -@contextlib.contextmanager -def _hub_context(): - hub = gevent.get_hub() - try: - yield hub - finally: - hub.destroy() - - -_INIT = "init" -_WAIT_FOR_MODEL = "wait_for_model" -_MODEL_CONFIG = "model_config" -_INFER_BATCH = "infer_batch" -_INFER_SAMPLE = "infer_sample" -_CLOSE = "close" - - -class FuturesModelClient: - """A client for interacting with a model deployed on the Triton Inference Server using concurrent.futures. - - This client allows asynchronous inference requests using a thread pool executor. It can be used to perform inference - on a model by providing input data and receiving the corresponding output data. The client can be used in a `with` - statement to ensure proper resource management. - - Example usage with context manager: - - ```python - with FuturesModelClient("localhost", "MyModel") as client: - result_future = client.infer_sample(input1=input1_data, input2=input2_data) - # do something else - print(result_future.result()) - ``` - - Usage without context manager: - - ```python - client = FuturesModelClient("localhost", "MyModel") - result_future = client.infer_sample(input1=input1_data, input2=input2_data) - # do something else - print(result_future.result()) - client.close() - ``` - """ - - def __init__( - self, - url: str, - model_name: str, - model_version: Optional[str] = None, - *, - max_workers: int = 128, - max_queue_size: int = 128, - non_blocking: bool = False, - init_timeout_s: Optional[float] = None, - inference_timeout_s: Optional[float] = None, - ): - """Initializes the FuturesModelClient for a given model. - - Args: - url: The Triton Inference Server url, e.g. `grpc://localhost:8001`. - model_name: The name of the model to interact with. - model_version: The version of the model to interact with. If None, the latest version will be used. - max_workers: The maximum number of threads that can be used to execute the given calls. If None, there is not limit on the number of threads. - max_queue_size: The maximum number of requests that can be queued. If None, there is not limit on the number of requests. - non_blocking: If True, the client will raise a PyTritonClientQueueFullError if the queue is full. If False, the client will block until the queue is not full. - init_timeout_s: Timeout in seconds for server and model being ready. If non passed default 60 seconds timeout will be used. - inference_timeout_s: Timeout in seconds for the single model inference request. If non passed default 60 seconds timeout will be used. 
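To make the queueing parameters above concrete, here is a hedged sketch of constructing the client with explicit limits; the URL, the model name `MyModel`, the input name `input1`, and all numeric values are assumptions for illustration.

```python
import numpy as np
from pytriton.client import FuturesModelClient
from pytriton.client.exceptions import PyTritonClientQueueFullError

input1_data = np.zeros((3,), dtype=np.float32)  # placeholder sample

client = FuturesModelClient(
    "grpc://localhost:8001",
    "MyModel",
    max_workers=4,            # at most 4 worker threads are spawned
    max_queue_size=8,         # at most 8 requests may wait in the queue
    non_blocking=True,        # raise instead of blocking when the queue is full
    inference_timeout_s=30.0,
)
try:
    future = client.infer_sample(input1=input1_data)
    print(future.result())
except PyTritonClientQueueFullError:
    pass  # with non_blocking=True a full queue surfaces immediately
finally:
    client.close()
```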
- """ - self._url = url - self._model_name = model_name - self._model_version = model_version - self._threads = [] - self._max_workers = max_workers - self._max_queue_size = max_queue_size - self._non_blocking = non_blocking - - if self._max_workers is not None and self._max_workers <= 0: - raise ValueError("max_workers must be greater than 0") - if self._max_queue_size is not None and self._max_queue_size <= 0: - raise ValueError("max_queue_size must be greater than 0") - - kwargs = {} - if self._max_queue_size is not None: - kwargs["maxsize"] = self._max_queue_size - self._queue = Queue(**kwargs) - self._queue.put((_INIT, None, None)) - self._init_timeout_s = _DEFAULT_FUTURES_INIT_TIMEOUT_S if init_timeout_s is None else init_timeout_s - self._inference_timeout_s = inference_timeout_s - self._closed = False - self._lock = Lock() - self._existing_client = None - - def __enter__(self): - """Create context for using FuturesModelClient as a context manager.""" - return self - - def __exit__(self, exc_type, exc_value, traceback): - """Close resources used by FuturesModelClient instance when exiting from the context.""" - self.close() - - def close(self, wait=True): - """Close resources used by FuturesModelClient. - - This method closes the resources used by the FuturesModelClient instance, including the Triton Inference Server connections. - Once this method is called, the FuturesModelClient instance should not be used again. - - Args: - wait: If True, then shutdown will not return until all running futures have finished executing. - """ - if self._closed: - return - _LOGGER.debug("Closing FuturesModelClient.") - - self._closed = True - for _ in range(len(self._threads)): - self._queue.put((_CLOSE, None, None)) - - if wait: - _LOGGER.debug("Waiting for futures to finish.") - for thread in self._threads: - thread.join() - - def wait_for_model(self, timeout_s: float) -> Future: - """Returns a Future object which result will be None when the model is ready. - - Typical usage: - - ```python - with FuturesModelClient("localhost", "BERT") as client - future = client.wait_for_model(300.) - # do something else - future.result() # wait rest of timeout_s time - # till return None if model is ready - # or raise PyTritonClientTimeutError - ``` - - Args: - timeout_s: The maximum amount of time to wait for the model to be ready, in seconds. - - Returns: - A Future object which result is None when the model is ready. - """ - return self._execute( - name=_WAIT_FOR_MODEL, - request=timeout_s, - ) - - def model_config(self) -> Future: - """Obtain the configuration of the model deployed on the Triton Inference Server. - - This method returns a Future object that will contain the TritonModelConfig object when it is ready. - Client will wait init_timeout_s for the server to get into readiness state before obtaining the model configuration. - - Returns: - A Future object that will contain the TritonModelConfig object when it is ready. - - Raises: - PyTritonClientClosedError: If the FuturesModelClient is closed. - """ - return self._execute(name=_MODEL_CONFIG) - - def infer_sample( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Future: - """Run asynchronous inference on a single data sample and return a Future object. - - This method allows the user to perform inference on a single data sample by providing input data and receiving the - corresponding output data. 
The method returns a Future object that wraps a dictionary of inference results, where dictionary keys are output names. - - Example usage: - - ```python - with FuturesModelClient("localhost", "BERT") as client: - result_future = client.infer_sample(input1=input1_data, input2=input2_data) - # do something else - print(result_future.result()) - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - future = client.infer_sample(input1, input2) - future = client.infer_sample(a=input1, b=input2) - ``` - - Args: - *inputs: Inference inputs provided as positional arguments. - parameters: Optional dictionary of inference parameters. - headers: Optional dictionary of HTTP headers for the inference request. - **named_inputs: Inference inputs provided as named arguments. - - Returns: - A Future object wrapping a dictionary of inference results, where dictionary keys are output names. - - Raises: - PyTritonClientClosedError: If the FuturesModelClient is closed. - """ - return self._execute( - name=_INFER_SAMPLE, - request=(inputs, parameters, headers, named_inputs), - ) - - def infer_batch( - self, - *inputs, - parameters: Optional[Dict[str, Union[str, int, bool]]] = None, - headers: Optional[Dict[str, Union[str, int, bool]]] = None, - **named_inputs, - ) -> Future: - """Run asynchronous inference on batched data and return a Future object. - - This method allows the user to perform inference on batched data by providing input data and receiving the corresponding output data. - The method returns a Future object that wraps a dictionary of inference results, where dictionary keys are output names. - - Example usage: - - ```python - with FuturesModelClient("localhost", "BERT") as client: - future = client.infer_batch(input1_sample, input2_sample) - # do something else - print(future.result()) - ``` - - Inference inputs can be provided either as positional or keyword arguments: - - ```python - future = client.infer_batch(input1, input2) - future = client.infer_batch(a=input1, b=input2) - ``` - - Mixing of argument passing conventions is not supported and will raise PyTritonClientValueError. - - Args: - *inputs: Inference inputs provided as positional arguments. - parameters: Optional dictionary of inference parameters. - headers: Optional dictionary of HTTP headers for the inference request. - **named_inputs: Inference inputs provided as named arguments. - - Returns: - A Future object wrapping a dictionary of inference results, where dictionary keys are output names. - - Raises: - PyTritonClientClosedError: If the FuturesModelClient is closed. 
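Because both `infer_sample` and `infer_batch` return futures, many requests can be issued before any result is collected. A short sketch of that pattern follows; the server address, the model name `BERT`, and the input name `input1` are placeholders reused from the docstring examples.

```python
import numpy as np
from concurrent.futures import as_completed
from pytriton.client import FuturesModelClient

samples = [np.random.rand(16).astype(np.float32) for _ in range(8)]

with FuturesModelClient("localhost", "BERT") as client:
    futures = [client.infer_sample(input1=sample) for sample in samples]
    for future in as_completed(futures):
        outputs = future.result()  # dict mapping output names to numpy arrays
        print({name: array.shape for name, array in outputs.items()})
```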
- """ - return self._execute(name=_INFER_BATCH, request=(inputs, parameters, headers, named_inputs)) - - def _execute(self, name, request=None): - if self._closed: - raise PyTritonClientClosedError("FutureModelClient is already closed") - self._extend_thread_pool() - future = Future() - if self._non_blocking: - try: - self._queue.put_nowait((future, request, name)) - except Full as e: - raise PyTritonClientQueueFullError("Queue is full") from e - else: - kwargs = {} - if self._inference_timeout_s is not None: - kwargs["timeout"] = self._inference_timeout_s - try: - self._queue.put((future, request, name), **kwargs) - except Full as e: - raise PyTritonClientQueueFullError("Queue is full") from e - return future - - def _extend_thread_pool(self): - if self._closed: - return - - with self._lock: - if not self._queue.empty() and (self._max_workers is None or len(self._threads) < self._max_workers): - _LOGGER.debug("Create new thread") - thread = Thread(target=self._worker) - self._threads.append(thread) - thread.start() - else: - _LOGGER.debug("No need to create new thread") - - def _client_request_executor(self, client, request, name): - _LOGGER.debug(f"Running {name} for {self._model_name}") - if name == _INFER_SAMPLE: - inputs, parameters, headers, named_inputs = request - result = client.infer_sample( - *inputs, - parameters=parameters, - headers=headers, - **named_inputs, - ) - elif name == _INFER_BATCH: - inputs, parameters, headers, named_inputs = request - result = client.infer_batch( - *inputs, - parameters=parameters, - headers=headers, - **named_inputs, - ) - elif name == _MODEL_CONFIG: - result = client.model_config - elif name == _WAIT_FOR_MODEL: - timeout_s = request - result = client.wait_for_model(timeout_s) - else: - raise PyTritonClientValueError(f"Unknown request name {name}") - self._set_existing_client(client) - return result - - def _create_client(self, lazy_init): - _LOGGER.debug(f"Creating ModelClient lazy_init={lazy_init}") - return ModelClient( - self._url, - self._model_name, - self._model_version, - lazy_init=lazy_init, - init_timeout_s=self._init_timeout_s, - inference_timeout_s=self._inference_timeout_s, - ) - - def _set_existing_client(self, client): - if client._model_config is not None: - with self._lock: - if self._existing_client is None: - _LOGGER.debug("Setting existing client") - self._existing_client = client - - def _remove_existing_client(self, client): - if client is not None: - with self._lock: - if self._existing_client is not None: - if self._existing_client is client: - _LOGGER.debug("Resetting existing client") - self._existing_client = None - - def _worker(self): - _LOGGER.debug("Starting worker thread") - client = None - # Work around for AttributeError: '_Threadlocal' object has no attribute 'hub' - # gevent/_hub_local.py", line 77, in gevent._gevent_c_hub_local.get_hub_noargs - with _hub_context(): - while True: - future, request, name = self._queue.get() - if future == _CLOSE: - _LOGGER.debug("Closing thread") - self._queue.task_done() - break - if future == _INIT: - with self._lock: - if self._existing_client is None: - try: - _LOGGER.debug("Initial client creation") - client = self._create_client(False) - _LOGGER.debug("Setting existing client") - self._existing_client = client - except Exception as e: - _LOGGER.warning(f"Error {e} occurred during init for {self._model_name}") - continue - try: - if client is None: - with self._lock: - if self._existing_client is not None: - _LOGGER.debug("Creating new client from existing client") - client 
= ModelClient.from_existing_client(self._existing_client) - if client is None: - _LOGGER.debug("Creating new client") - client = self._create_client(name == _WAIT_FOR_MODEL) - with client: - self._set_existing_client(client) - while True: - try: - result = self._client_request_executor(client, request, name) - _LOGGER.debug(f"Finished {name} for {self._model_name}") - future.set_result(result) - self._queue.task_done() - except Exception as e: - _LOGGER.error(f"Error {e} occurred during {name} for {self._model_name}") - future.set_exception(e) - self._queue.task_done() - break - future, request, name = self._queue.get() - if future == _CLOSE: - _LOGGER.debug("Closing thread") - self._queue.task_done() - return - except Exception as e: - _LOGGER.error(f"Error {e} occurred during {name} for {self._model_name}") - future.set_exception(e) - self._queue.task_done() - finally: - self._remove_existing_client(client) - client = None - _LOGGER.debug("Finishing worker thread") diff --git a/stf/stf-api-alternative/pytriton/pytriton/client/exceptions.py b/stf/stf-api-alternative/pytriton/pytriton/client/exceptions.py deleted file mode 100644 index 6619b4a318b7a0f00fe84d0d9b07086a662764d6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/client/exceptions.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Exceptions thrown in pytriton.client module.""" - - -class PyTritonClientError(Exception): - """Generic pytriton client exception.""" - - def __init__(self, message: str): - """Initialize exception with message. - - Args: - message: Error message - """ - self._message = message - - def __str__(self) -> str: - """String representation of error. - - Returns: - Message content - """ - return self._message - - @property - def message(self): - """Get the exception message. - - Returns: - The message associated with this exception, or None if no message. 
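All client errors derive from `PyTritonClientError` and expose the `message` property shown above, so callers can catch a specific subclass (defined just below) or the whole family. A hedged sketch, assuming the placeholder model name `MyModel` and input name `input1`:

```python
import numpy as np
from pytriton.client import ModelClient
from pytriton.client.exceptions import PyTritonClientError, PyTritonClientTimeoutError

input1_data = np.zeros((3,), dtype=np.float32)  # placeholder sample

try:
    with ModelClient("localhost", "MyModel", init_timeout_s=60.0) as client:
        result = client.infer_sample(input1=input1_data)
except PyTritonClientTimeoutError as e:
    print(f"timed out: {e.message}")             # message comes from the base class
except PyTritonClientError as e:
    print(f"client-side failure: {e.message}")   # catch-all for the hierarchy
```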
- - """ - return self._message - - -class PyTritonClientValueError(PyTritonClientError): - """Generic error raised in case of incorrect values are provided into API.""" - - pass - - -class PyTritonClientInvalidUrlError(PyTritonClientValueError): - """Error raised when provided Triton Inference Server url is invalid.""" - - pass - - -class PyTritonClientTimeoutError(PyTritonClientError): - """Timeout occurred during communication with the Triton Inference Server.""" - - pass - - -class PyTritonClientModelUnavailableError(PyTritonClientError): - """Model with given name and version is unavailable on the given Triton Inference Server.""" - - pass - - -class PyTritonClientClosedError(PyTritonClientError): - """Error raised in case of trying to use closed client.""" - - pass - - -class PyTritonClientModelDoesntSupportBatchingError(PyTritonClientError): - """Error raised in case of trying to infer batch on model not supporting batching.""" - - pass - - -class PyTritonClientInferenceServerError(PyTritonClientError): - """Error raised in case of error on inference callable or Triton Inference Server side.""" - - pass - - -class PyTritonClientQueueFullError(PyTritonClientError): - """Error raised in case of trying to push request to full queue.""" - - pass diff --git a/stf/stf-api-alternative/pytriton/pytriton/client/utils.py b/stf/stf-api-alternative/pytriton/pytriton/client/utils.py deleted file mode 100644 index e264a2e105899a57e357388cd0d319f01aeb42f4..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/client/utils.py +++ /dev/null @@ -1,385 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Utility module supporting model clients.""" -import dataclasses -import enum -import logging -import socket -import sys -import time -import urllib -import warnings -from typing import Optional, Union - -import tritonclient.grpc -import tritonclient.http -import tritonclient.http.aio -from grpc import RpcError -from tritonclient.utils import InferenceServerException - -from pytriton.client.exceptions import PyTritonClientInvalidUrlError, PyTritonClientTimeoutError -from pytriton.client.warnings import NotSupportedTimeoutWarning -from pytriton.constants import DEFAULT_GRPC_PORT, DEFAULT_HTTP_PORT -from pytriton.model_config.parser import ModelConfigParser - -_LOGGER = logging.getLogger(__name__) - -_TritonSyncClientType = Union[tritonclient.grpc.InferenceServerClient, tritonclient.http.InferenceServerClient] - -_DEFAULT_NETWORK_TIMEOUT_S = 60.0 # 1min -_DEFAULT_WAIT_FOR_SERVER_READY_TIMEOUT_S = 60.0 # 1min -_DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S = 300.0 # 5min - -LATEST_MODEL_VERSION = "" - - -# Special value for model_version argument. If model_version is None, the latest version of the model is returned. - - -class ModelState(enum.Enum): - """Describe model state in Triton. 
- - Attributes: - LOADING: Loading of model - UNLOADING: Unloading of model - UNAVAILABLE: Model is missing or could not be loaded - READY: Model is ready for inference - """ - - LOADING = "LOADING" - UNLOADING = "UNLOADING" - UNAVAILABLE = "UNAVAILABLE" - READY = "READY" - - -def parse_http_response(models): - """Parse model repository index response from Triton Inference Server for HTTP.""" - models_states = {} - _LOGGER.debug("Parsing model repository index entries:") - for model in models: - _LOGGER.debug(f" name={model.get('name')} version={model.get('version')} state={model.get('state')}") - if not model.get("version"): - continue - - model_state = ModelState(model["state"]) if model.get("state") else ModelState.LOADING - models_states[(model["name"], model["version"])] = model_state - - return models_states - - -def parse_grpc_response(models): - """Parse model repository index response from Triton Inference Server for GRCP.""" - models_states = {} - _LOGGER.debug("Parsing model repository index entries:") - for model in models: - _LOGGER.debug(f" name={model.name} version={model.version} state={model.state}") - if not model.version: - continue - - model_state = ModelState(model.state) if model.state else ModelState.LOADING - models_states[(model.name, model.version)] = model_state - - return models_states - - -def get_model_state( - client: _TritonSyncClientType, - model_name: str, - model_version: Optional[str] = None, -) -> ModelState: - """Obtains state of the model deployed in Triton Inference Server. - - Args: - client: Triton Inference Server client to use for communication - model_name: name of the model which state we're requesting. - model_version: - version of the model which state we're requesting. - If model_version is None state of latest model is returned. - The latest versions of the model are the numerically greatest version numbers. - - Returns: - Model state. _ModelState.UNAVAILABLE is returned in case if model with given name and version is not found. - - """ - repository_index = client.get_model_repository_index() - if isinstance(repository_index, list): - models_states = parse_http_response(models=repository_index) - else: - models_states = parse_grpc_response(models=repository_index.models) - - if model_version is None: - requested_model_states = { - version: state for (name, version), state in models_states.items() if name == model_name - } - if not requested_model_states: - return ModelState.UNAVAILABLE - else: - requested_model_states = sorted(requested_model_states.items(), key=lambda item: int(item[0])) - latest_version, latest_version_state = requested_model_states[-1] - return latest_version_state - else: - state = models_states.get((model_name, model_version), ModelState.UNAVAILABLE) - return state - - -def get_model_config( - client: _TritonSyncClientType, - model_name: str, - model_version: Optional[str] = None, - timeout_s: Optional[float] = None, -): - """Obtain configuration of model deployed on the Triton Inference Server. - - Function waits for server readiness. - - Typical use: - - client = tritonclient.grpc.Client("localhost:8001") - model_config = get_model_config(client, "MyModel", "1", 60.0) - model_config = get_model_config(client, "MyModel") - - Args: - client: Triton Inference Server client to use for communication - model_name: name of the model which configuration we're requesting. - model_version: - version of the model which configuration we're requesting. - If model_version is None configuration of the latest model is returned. 
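A small self-contained illustration of the latest-version rule used by `get_model_state` above: when no version is requested, versions are compared numerically rather than lexicographically. The dictionary contents below are invented for the example.

```python
# Mirrors the selection logic of get_model_state when model_version is None.
models_states = {("MyModel", "9"): "READY", ("MyModel", "10"): "LOADING"}

requested = {v: s for (name, v), s in models_states.items() if name == "MyModel"}
latest_version, latest_state = sorted(requested.items(), key=lambda item: int(item[0]))[-1]
print(latest_version, latest_state)  # "10" LOADING - numeric compare, so "10" > "9"
```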
- The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to finish model configuration obtain. Default value is 300.0 s. - - Returns: - Configuration of requested model. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. - PyTritonClientModelUnavailableError: If model with given name (and version) is unavailable. - """ - wait_for_model_ready(client, model_name=model_name, model_version=model_version, timeout_s=timeout_s) - - model_version = model_version or "" - - _LOGGER.debug(f"Obtaining model {model_name} config") - if isinstance(client, tritonclient.grpc.InferenceServerClient): - response = client.get_model_config(model_name, model_version, as_json=True) - model_config = response["config"] - else: - model_config = client.get_model_config(model_name, model_version) - model_config = ModelConfigParser.from_dict(model_config) - _LOGGER.debug(f"Model config: {model_config}") - return model_config - - -def _warn_on_too_big_network_timeout(client: _TritonSyncClientType, timeout_s: float): - if isinstance(client, tritonclient.http.InferenceServerClient): - connection_pool = client._client_stub._connection_pool - network_reldiff_s = (connection_pool.network_timeout - timeout_s) / timeout_s - connection_reldiff_s = (connection_pool.connection_timeout - timeout_s) / timeout_s - rtol = 0.001 - if network_reldiff_s > rtol or connection_reldiff_s > rtol: - warnings.warn( - "Client network and/or connection timeout is smaller than requested timeout_s. This may cause unexpected behavior. " - f"network_timeout={connection_pool.network_timeout} " - f"connection_timeout={connection_pool.connection_timeout} " - f"timeout_s={timeout_s}", - NotSupportedTimeoutWarning, - stacklevel=1, - ) - - -def wait_for_server_ready( - client: _TritonSyncClientType, - timeout_s: Optional[float] = None, -): - """Waits for Triton Inference Server to be ready. - - Typical use: - - client = tritonclient.http.Client("localhost:8001") - wait_for_server_ready(client, timeout_s=600.0) - - Args: - client: Triton Inference Server client to use for communication - timeout_s: timeout to server get into readiness state. Default value is 60.0 s. - - Raises: - PyTritonClientTimeoutError: If obtain of model configuration didn't finish before given timeout. 
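A hedged usage sketch for the readiness helpers and `get_model_config` described above; it assumes a Triton server reachable at `localhost:8000` and a model named `MyModel`.

```python
import tritonclient.http
from pytriton.client.utils import get_model_config, wait_for_server_ready

client = tritonclient.http.InferenceServerClient("localhost:8000")

# Block until the server reports ready (or raise PyTritonClientTimeoutError).
wait_for_server_ready(client, timeout_s=60.0)

# get_model_config additionally waits for the model before fetching its configuration.
model_config = get_model_config(client, "MyModel", timeout_s=300.0)
print(model_config.model_name, model_config.max_batch_size)
```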
- """ - timeout_s = timeout_s if timeout_s is not None else _DEFAULT_WAIT_FOR_SERVER_READY_TIMEOUT_S - should_finish_before_s = time.time() + timeout_s - _warn_on_too_big_network_timeout(client, timeout_s) - - def _is_server_ready(): - try: - return client.is_server_ready() and client.is_server_live() - except InferenceServerException: - return False - except (RpcError, ConnectionError, socket.gaierror): # GRPC and HTTP clients raises these errors - return False - except Exception as e: - _LOGGER.exception(f"Exception while checking server readiness: {e}") - raise e - - timeout_s = max(0.0, should_finish_before_s - time.time()) - _LOGGER.debug(f"Waiting for server to be ready (timeout={timeout_s})") - is_server_ready = _is_server_ready() - while not is_server_ready: - time.sleep(min(1.0, timeout_s)) - is_server_ready = _is_server_ready() - if not is_server_ready and time.time() >= should_finish_before_s: - raise PyTritonClientTimeoutError("Waiting for server to be ready timed out.") - - -def wait_for_model_ready( - client: _TritonSyncClientType, - model_name: str, - model_version: Optional[str] = None, - timeout_s: Optional[float] = None, -): - """Wait for Triton Inference Server to be ready. - - Args: - client: Triton Inference Server client to use for communication. - model_name: name of the model to wait for readiness. - model_version: - version of the model to wait for readiness. - If model_version is None waiting for latest version of the model. - The latest versions of the model are the numerically greatest version numbers. - timeout_s: timeout to server and model get into readiness state. Default value is 300.0 s. - - Raises: - PyTritonClientTimeoutError: If server readiness didn't finish before given timeout. - """ - model_version = model_version or "" - model_version_msg = model_version or LATEST_MODEL_VERSION - timeout_s = timeout_s if timeout_s is not None else _DEFAULT_WAIT_FOR_MODEL_TIMEOUT_S - should_finish_before_s = time.time() + timeout_s - - wait_for_server_ready(client, timeout_s=timeout_s) - timeout_s = max(0.0, should_finish_before_s - time.time()) - _LOGGER.debug(f"Waiting for model {model_name}/{model_version_msg} to be ready (timeout={timeout_s})") - is_model_ready = client.is_model_ready(model_name, model_version) - while not is_model_ready: - time.sleep(min(1.0, timeout_s)) - is_model_ready = client.is_model_ready(model_name, model_version) - - if not is_model_ready and time.time() >= should_finish_before_s: - raise PyTritonClientTimeoutError( - f"Waiting for model {model_name}/{model_version_msg} to be ready timed out." - ) - - -def create_client_from_url( - url: str, network_timeout_s: Optional[float] = None -) -> _TritonSyncClientType: # type: ignore - """Create Triton Inference Server client. - - Args: - url: url of the server to connect to. - If url doesn't contain scheme (e.g. "localhost:8001") http scheme is added. - If url doesn't contain port (e.g. "localhost") default port for given scheme is added. - network_timeout_s: timeout for client commands. Default value is 60.0 s. - - Returns: - Triton Inference Server client. - - Raises: - PyTritonClientInvalidUrlError: If provided Triton Inference Server url is invalid. 
- """ - url = TritonUrl.from_url(url) - triton_client_lib = {"grpc": tritonclient.grpc, "http": tritonclient.http}[url.scheme] - - if url.scheme == "grpc": - # by default grpc client has very large number of timeout, thus we want to make it equal to http client timeout - network_timeout_s = _DEFAULT_NETWORK_TIMEOUT_S if network_timeout_s is None else network_timeout_s - warnings.warn( - f"tritonclient.grpc doesn't support timeout for other commands than infer. Ignoring network_timeout: {network_timeout_s}.", - NotSupportedTimeoutWarning, - stacklevel=1, - ) - - triton_client_init_kwargs = {} - if network_timeout_s is not None: - triton_client_init_kwargs.update( - **{ - "grpc": {}, - "http": {"connection_timeout": network_timeout_s, "network_timeout": network_timeout_s}, - }[url.scheme] - ) - - _LOGGER.debug(f"Creating InferenceServerClient for {url.with_scheme} with {triton_client_init_kwargs}") - return triton_client_lib.InferenceServerClient(url.without_scheme, **triton_client_init_kwargs) - - -@dataclasses.dataclass -class TritonUrl: - """TritonUrl class for parsing Triton Inference Server url. - - Attributes: - scheme: scheme of the url (http or grpc) - hostname: hostname of the url - port: port of the url - - Examples: - triton_url = TritonUrl.from_url("localhost:8000") - triton_url.with_scheme - >>> "http://localhost:8000" - triton_url.without_scheme - >>> "localhost:8000" - triton_url.scheme, triton_url.hostname, triton_url.port - >>> ("http", "localhost", 8000) - """ - - scheme: str - hostname: str - port: int - - @classmethod - def from_url(cls, url): - """Parse triton url and create TritonUrl instance. - - Returns: - TritonUrl object with scheme, hostname and port. - """ - if not isinstance(url, str): - raise PyTritonClientInvalidUrlError(f"Invalid url {url}. Url must be a string.") - try: - parsed_url = urllib.parse.urlparse(url) - # change in py3.9+ - # https://github.com/python/cpython/commit/5a88d50ff013a64fbdb25b877c87644a9034c969 - if sys.version_info < (3, 9) and not parsed_url.scheme and "://" in parsed_url.path: - raise ValueError(f"Invalid url {url}. Only grpc and http are supported.") - if (not parsed_url.scheme and "://" not in parsed_url.path) or ( - sys.version_info >= (3, 9) and parsed_url.scheme and not parsed_url.netloc - ): - _LOGGER.debug(f"Adding http scheme to {url}") - parsed_url = urllib.parse.urlparse(f"http://{url}") - - scheme = parsed_url.scheme.lower() - if scheme not in ["grpc", "http"]: - raise ValueError(f"Invalid scheme {scheme}. Only grpc and http are supported.") - - port = parsed_url.port or {"grpc": DEFAULT_GRPC_PORT, "http": DEFAULT_HTTP_PORT}[scheme] - except ValueError as e: - raise PyTritonClientInvalidUrlError(f"Invalid url {url}") from e - return cls(scheme, parsed_url.hostname, port) - - @property - def with_scheme(self): - """Get Triton Inference Server url with scheme.""" - return f"{self.scheme}://{self.hostname}:{self.port}" - - @property - def without_scheme(self): - """Get Triton Inference Server url without scheme.""" - return f"{self.hostname}:{self.port}" diff --git a/stf/stf-api-alternative/pytriton/pytriton/client/warnings.py b/stf/stf-api-alternative/pytriton/pytriton/client/warnings.py deleted file mode 100644 index 7e121689e5d311f747037c530c290d1606839cbe..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/client/warnings.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Warnings for pytriton module.""" - - -class PyTritonWarning(UserWarning): - """Base warning for pytriton module.""" - - pass - - -class NotSupportedTimeoutWarning(PyTritonWarning): - """A warning for client, which doesn't support timeout configuration.""" - - pass diff --git a/stf/stf-api-alternative/pytriton/pytriton/constants.py b/stf/stf-api-alternative/pytriton/pytriton/constants.py deleted file mode 100644 index 695bd6c762aae79d44d7e5128dbcc1ce713af640..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/constants.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 -"""Constants for pytriton.""" -import os -import pathlib - -DEFAULT_HTTP_PORT = 8000 -DEFAULT_GRPC_PORT = 8001 -DEFAULT_METRICS_PORT = 8002 -TRITON_LOCAL_IP = "127.0.0.1" -TRITON_CONTEXT_FIELD_NAME = "triton_context" -TRITON_PYTHON_BACKEND_INTERPRETER_DIRNAME = "python_backend_interpreter" -DEFAULT_TRITON_STARTUP_TIMEOUT_S = 120 -CREATE_TRITON_CLIENT_TIMEOUT_S = 10 - -__DEFAULT_PYTRITON_HOME = os.path.join(os.getenv("XDG_CACHE_HOME", "$HOME/.cache"), "pytriton") -__PYTRITON_HOME = os.path.expanduser(os.path.expandvars(os.getenv("PYTRITON_HOME", __DEFAULT_PYTRITON_HOME))) -PYTRITON_HOME = pathlib.Path(__PYTRITON_HOME).resolve().absolute() diff --git a/stf/stf-api-alternative/pytriton/pytriton/decorators.py b/stf/stf-api-alternative/pytriton/pytriton/decorators.py deleted file mode 100644 index 3a2d1be9e36d8356fd3a384a354007953b5d955b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/decorators.py +++ /dev/null @@ -1,657 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Inference callable decorators.""" -import collections -import dataclasses -import inspect -import itertools -import operator -import typing -from bisect import bisect_left -from collections.abc import MutableMapping -from typing import Callable, Dict, List, NamedTuple, Optional, Tuple, Union - -import numpy as np -import wrapt - -from pytriton.constants import TRITON_CONTEXT_FIELD_NAME -from pytriton.exceptions import PyTritonBadParameterError, PyTritonRuntimeError, PyTritonValidationError -from pytriton.model_config.triton_model_config import TritonModelConfig -from pytriton.proxy.communication import _serialize_byte_tensor - -_WrappedWithWrapper = NamedTuple( - "WrappedWithWrapper", [("wrapped", Optional[Callable]), ("wrapper", Optional[Callable])] -) - - -InputNames = typing.List[str] -InferenceRequest = typing.Dict[str, np.ndarray] -InferenceRequests = typing.Union[typing.List[InferenceRequest], typing.Tuple[InferenceRequest, ...]] -InferenceResult = typing.Dict[str, np.ndarray] -InferenceResults = typing.Union[typing.List[InferenceResult], typing.Tuple[InferenceResult, ...]] - - -def get_inference_request_batch_size(inference_request: InferenceRequest) -> int: - """Get batch size from triton request. - - Args: - inference_request (InferenceRequest): Triton request. - - Returns: - int: Batch size. - """ - first_input_value = next(iter(inference_request.values())) - batch_size, *dims = first_input_value.shape - return batch_size - - -def _get_wrapt_stack(wrapped) -> List[_WrappedWithWrapper]: - """Returns stack of wrapped functions with wrappers applied to inference callable.""" - stack = [] - infer_callable = wrapped - while infer_callable is not None: - stack.append(_WrappedWithWrapper(infer_callable, getattr(infer_callable, "_self_wrapper", None))) - infer_callable = getattr(infer_callable, "__wrapped__", None) - - return stack - - -class ModelConfigDict(MutableMapping): - """Dictionary for storing model configs for inference callable.""" - - def __init__(self): - """Create ModelConfigDict object.""" - self._data: Dict[str, TritonModelConfig] = {} - self._keys: List[Callable] = [] - - def __getitem__(self, infer_callable: Callable) -> TritonModelConfig: - """Get model config for inference callable.""" - key = self._get_model_config_key(infer_callable) - return self._data[key] - - def __setitem__(self, infer_callable: Callable, item: TritonModelConfig): - """Set model config for inference callable.""" - self._keys.append(infer_callable) - key = self._get_model_config_key(infer_callable) - self._data[key] = item - - def __delitem__(self, infer_callable: Callable): - """Delete model config for inference callable.""" - key = self._get_model_config_key(infer_callable) - del self._data[key] - - def __len__(self): - """Get number of inference callable keys.""" - return len(self._data) - - def __iter__(self): - """Iterate over inference callable keys.""" - return iter(self._keys) - - @staticmethod - def _get_model_config_key(infer_callable: Callable) -> str: - """Prepares TritonModelConfig dictionary key for function/callable.""" - dict_key = infer_callable - if inspect.ismethod(dict_key) and dict_key.__name__ == "__call__": - dict_key = dict_key.__self__ - return str(dict_key) - - -@dataclasses.dataclass -class TritonContext: - """Triton context definition class.""" - - model_configs: ModelConfigDict = dataclasses.field(default_factory=ModelConfigDict) - - -def get_triton_context(wrapped, instance) -> TritonContext: - """Retrieves triton context from callable. 
- - It is used in @triton_context to get triton context registered by triton binding in inference callable. - If you use @triton_context decorator you do not need this function. - """ - caller = instance or wrapped - if not hasattr(caller, "__triton_context__"): - raise PyTritonValidationError("Wrapped function or object must bound with triton to get __triton_context__") - return caller.__triton_context__ - - -def get_model_config(wrapped, instance) -> TritonModelConfig: - """Retrieves instance of TritonModelConfig from callable. - - It is internally used in convert_output function to get output list from model. - You can use this in custom decorators if you need access to model_config information. - If you use @triton_context decorator you do not need this function (you can get model_config directly - from triton_context passing function/callable to dictionary getter). - """ - return get_triton_context(wrapped, instance).model_configs[wrapped] - - -def convert_output( - outputs: Union[Dict, List, Tuple], wrapped=None, instance=None, model_config: Optional[TritonModelConfig] = None -): - """Converts output from tuple ot list to dictionary. - - It is utility function useful for mapping output list into dictionary of outputs. - Currently, it is used in @sample and @batch decorators (we assume that user can return list or tuple of outputs - instead of dictionary if this list matches output list in model config (size and order). - """ - if isinstance(outputs, dict): - return outputs - elif isinstance(outputs, (list, tuple)): - if model_config is None: - model_config = get_model_config(wrapped, instance) - if len(outputs) != len(model_config.outputs): - raise PyTritonValidationError("Outputs length different than config outputs length") - outputs = {config_output.name: output for config_output, output in zip(model_config.outputs, outputs)} - return outputs - else: - raise PyTritonValidationError(f"Unsupported output type {type(outputs)}.") - - -@wrapt.decorator -def sample(wrapped, instance, args, kwargs): - """Decorator is used for non-batched inputs to convert from one element list of requests to request kwargs. - - Decorator takes first request and convert it into named inputs. - Useful with non-batching models - instead of one element list of request, we will get named inputs - `kwargs`. - """ - kwargs.update(args[0][0]) - outputs = wrapped(*args[1:], **kwargs) - outputs = convert_output(outputs, wrapped, instance) - return [outputs] - - -@wrapt.decorator -def batch(wrapped, instance, args, kwargs): - """Decorator for converting list of request dicts to dict of input batches. - - Converts list of request dicts to dict of input batches. - It passes **kwargs to inference callable where each named input contains numpy array with batch of requests - received by Triton server. - We assume that each request has the same set of keys (you can use group_by_keys decorator before - using @batch decorator if your requests may have different set of keys). - - Raises: - PyTritonValidationError: If the requests have different set of keys. - ValueError: If the output tensors have different than expected batch sizes. Expected batch size is - calculated as a sum of batch sizes of all requests. 
- """ - req_list = args[0] - input_names = req_list[0].keys() - - for req_dict2 in req_list[1:]: - if input_names != req_dict2.keys(): - raise PyTritonValidationError("Cannot batch requests with different set of inputs keys") - - inputs = {} - for model_input in input_names: - concatenated_input_data = np.concatenate([req[model_input] for req in req_list]) - inputs[model_input] = concatenated_input_data - - args = args[1:] - new_kwargs = dict(kwargs) - new_kwargs.update(inputs) - outputs = wrapped(*args, **new_kwargs) - - def _split_result(_result): - outputs = convert_output(_result, wrapped, instance) - output_names = outputs.keys() - - requests_total_batch_size = sum(get_inference_request_batch_size(req) for req in req_list) - not_matching_tensors_shapes = { - output_name: output_tensor.shape - for output_name, output_tensor in outputs.items() - if output_tensor.shape[0] != requests_total_batch_size - } - if not_matching_tensors_shapes: - raise ValueError( - f"Received output tensors with different batch sizes: {', '.join(': '.join(map(str, item)) for item in not_matching_tensors_shapes.items())}. " - f"Expected batch size: {requests_total_batch_size}. " - ) - - out_list = [] - start_idx = 0 - for request in req_list: - # get batch_size of first input for each request - assume that all inputs have same batch_size - request_batch_size = get_inference_request_batch_size(request) - req_output_dict = {} - for _output_ind, output_name in enumerate(output_names): - req_output = outputs[output_name][start_idx : start_idx + request_batch_size, ...] - req_output_dict[output_name] = req_output - out_list.append(req_output_dict) - start_idx += request_batch_size - return out_list - - if inspect.isgenerator(outputs): - return (_split_result(_result) for _result in outputs) - else: - return _split_result(outputs) - - -def group_by_values(*keys, pad_fn: typing.Optional[typing.Callable[[InferenceRequests], InferenceRequests]] = None): - """Decorator for grouping requests by values of selected keys. - - This function splits a batch into multiple sub-batches based on the specified keys values and - calls the decorated function with each sub-batch. This is particularly useful when working with models - that require dynamic parameters sent by the user. - - For example, given an input of the form: - - {"sentences": [b"Sentence1", b"Sentence2", b"Sentence3"], "param1": [1, 1, 2], "param2": [1, 1, 1]} - - Using @group_by_values("param1", "param2") will split the batch into two sub-batches: - - [ - {"sentences": [b"Sentence1", b"Sentence2"], "param1": [1, 1], "param2": [1, 1]}, - {"sentences": [b"Sentence3"], "param1": [2], "param2": [1]} - ] - - This decorator should be used after the @batch decorator. - - Example usage: - - @batch - @group_by_values("param1", "param2") - def infer_fun(**inputs): - ... - return outputs - - Args: - *keys: List of keys to group by. - pad_fn: Optional function to pad the batch to the same size before merging again to a single batch. - - Returns: - The decorator function. 
- """ - - def value_to_key(value): - if isinstance(value, np.ndarray): - if value.dtype == np.object_ or value.dtype.type == np.bytes_: - return _serialize_byte_tensor(value) - else: - return value.tobytes() - return value - - def _get_sort_key_for_sample(_request, _sample_idx: int): - return tuple(value_to_key(_request[_key][_sample_idx]) for _key in keys) - - def _group_request(_request: InferenceRequest, _batch_size: int): - idx_inputs = [(sample_idx, _get_sort_key_for_sample(_request, sample_idx)) for sample_idx in range(_batch_size)] - idx_inputs.sort(key=operator.itemgetter(1)) - for _, group in itertools.groupby(idx_inputs, key=operator.itemgetter(1)): - _samples_idxes, _ = zip(*group) - grouped_request = {input_name: value[_samples_idxes, ...] for input_name, value in _request.items()} - yield _samples_idxes, grouped_request - - @wrapt.decorator - def _wrapper(wrapped, instance, args, kwargs): - wrappers_stack = [ - callable_with_wrapper.wrapper - for callable_with_wrapper in _get_wrapt_stack(wrapped) - if callable_with_wrapper.wrapper is not None - ] - if batch in wrappers_stack: - raise PyTritonRuntimeError("The @group_by_values decorator must be used after the @batch decorator.") - - request = {k: v for k, v in kwargs.items() if k not in _SPECIAL_KEYS} - other_kwargs = {k: v for k, v in kwargs.items() if k in _SPECIAL_KEYS} - - batch_size = get_inference_request_batch_size(request) - sample_indices_with_interim_result = [] - for sample_indices, _grouped_sub_request in _group_request(request, batch_size): - interim_result = wrapped(*args, **_grouped_sub_request, **other_kwargs) - sample_indices_with_interim_result.append((sample_indices, interim_result)) - - if pad_fn is not None: - indices, results = tuple(map(tuple, zip(*sample_indices_with_interim_result))) - results = pad_fn(results) - sample_indices_with_interim_result = tuple(zip(indices, results)) - - _, first_result_data = sample_indices_with_interim_result[0] - result = { - output_name: np.zeros((batch_size,) + data.shape[1:], dtype=data.dtype) - for output_name, data in first_result_data.items() - } - for indices, results in sample_indices_with_interim_result: - for output_name, data in results.items(): - result[output_name][indices, ...] = data - - return result - - return _wrapper - - -class ConstantPadder: - """Padder that pads the given batches with a constant value.""" - - def __init__(self, pad_value=0): - """Initialize the padder. - - Args: - pad_value (int, optional): Padding value. Defaults to 0. - """ - self.pad_value = pad_value - - def __call__(self, batches_list: InferenceResults) -> InferenceResults: - """Pad the given batches with the specified value to pad size enabling further batching to single arrays. - - Args: - batches_list (List[Dict[str, np.ndarray]]): List of batches to pad. - - Returns: - List[Dict[str, np.ndarray]]: List of padded batches. - - Raises: - PyTritonRuntimeError: If the input arrays for a given input name have different dtypes. 
- """ - - def _get_padded_shape(_batches: List[np.ndarray]) -> Tuple[int, ...]: - """Get the shape of the padded array without batch axis.""" - return tuple(np.max([batch.shape[1:] for batch in _batches if batch is not None], axis=0)) - - def _get_padded_dtype(_batches: List[np.ndarray]) -> np.dtype: - dtypes = [batch.dtype for batch in _batches if batch is not None] - result_dtype = dtypes[0] - - if not all(dtype.kind == result_dtype.kind for dtype in dtypes): - raise PyTritonRuntimeError("All input arrays for given input name must have the same dtype.") - - # for bytes (encoded string) or unicode string need to obtain the max length - if result_dtype.kind in "SU": - order_and_kind = result_dtype.str[:2] - max_len = max([int(dtype.str[2:]) for dtype in dtypes]) - result_dtype = f"{order_and_kind}{max_len}" - else: - if not all(dtype == result_dtype for dtype in dtypes): - raise PyTritonRuntimeError("All input arrays for given input name must have the same dtype.") - - return np.dtype(result_dtype) - - input_names = list( - collections.OrderedDict.fromkeys(input_name for batch in batches_list for input_name in batch.keys()) - ) - batches_by_name = {input_name: [batch.get(input_name) for batch in batches_list] for input_name in input_names} - for input_batches in batches_by_name.values(): - result_shape, result_dtype = _get_padded_shape(input_batches), _get_padded_dtype(input_batches) - for batch_idx, batch in enumerate(input_batches): - if batch is not None: - input_batches[batch_idx] = np.pad( - batch, - [(0, 0)] + [(0, b - a) for a, b in zip(batch.shape[1:], result_shape)], - mode="constant", - constant_values=self.pad_value if result_dtype.kind not in ["S", "U", "O"] else b"", - ).astype(result_dtype) - - return [ - {name: batches[batch_idx] for name, batches in batches_by_name.items() if batches[batch_idx] is not None} - for batch_idx in range(len(batches_list)) - ] - - -@wrapt.decorator -def group_by_keys(wrapped, instance, args, kwargs): - """Group by keys. - - Decorator prepares groups of requests with the same set of keys and calls wrapped function - for each group separately (it is convenient to use this decorator before batching, because the batching decorator - requires consistent set of inputs as it stacks them into batches). - """ - inputs = args[0] - idx_inputs = [(idx, tuple(sorted(input.keys())), input) for idx, input in enumerate(inputs)] - idx_inputs.sort(key=operator.itemgetter(1)) - idx_groups_res = [] - for _, group in itertools.groupby(idx_inputs, key=operator.itemgetter(1)): - idx, _key, sample_list = zip(*group) - args = (list(sample_list),) + args[1:] - out = wrapped(*args, **kwargs) - idx_groups_res.extend(zip(idx, out)) - - idx_groups_res.sort(key=operator.itemgetter(0)) - res_flat = [r[1] for r in idx_groups_res] - return res_flat - - -def fill_optionals(**defaults): - """This decorator ensures that any missing inputs in requests are filled with default values specified by the user. - - Default values should be NumPy arrays without batch axis. - - If you plan to group requests ex. with - [@group_by_keys][pytriton.decorators.group_by_keys] or - [@group_by_vales][pytriton.decorators.group_by_values] decorators - provide default values for optional parameters at the beginning of decorators stack. - The other decorators can then group requests into bigger batches resulting in a better model performance. - - Typical use: - - @fill_optionals() - @group_by_keys() - @batch - def infer_fun(**inputs): - ... 
- return outputs - - Args: - defaults: keyword arguments containing default values for missing inputs - - - If you have default values for some optional parameter it is good idea to provide them at the very beginning, - so the other decorators (e.g. @group_by_keys) can make bigger consistent groups. - """ - - def _verify_defaults(model_config: TritonModelConfig): - inputs = {spec.name: spec for spec in model_config.inputs} - not_matching_default_names = sorted(set(defaults) - set(inputs)) - if not_matching_default_names: - raise PyTritonBadParameterError(f"Could not found {', '.join(not_matching_default_names)} inputs") - - non_numpy_items = {k: v for k, v in defaults.items() if not isinstance(v, np.ndarray)} - if non_numpy_items: - raise PyTritonBadParameterError( - f"Could not use {', '.join([f'{k}={v}' for k, v in non_numpy_items.items()])} defaults " - "as they are not NumPy arrays" - ) - - not_matching_dtypes = {k: (v.dtype, inputs[k].dtype) for k, v in defaults.items() if v.dtype != inputs[k].dtype} - if not_matching_dtypes: - non_matching_dtypes_str_list = [ - f"{name}: dtype={have_dtype} expected_dtype={expected_dtype}" - for name, (have_dtype, expected_dtype) in not_matching_dtypes.items() - ] - raise PyTritonBadParameterError( - f"Could not use {', '.join(non_matching_dtypes_str_list)} " - f"defaults as they have different than input signature dtypes" - ) - - def _shape_match(_have_shape, _expected_shape): - return len(_have_shape) == len(_expected_shape) and all( - e == -1 or h == e for h, e in zip(_have_shape, _expected_shape) - ) - - not_matching_shapes = { - k: (v.shape, inputs[k].shape) for k, v in defaults.items() if not _shape_match(v.shape, inputs[k].shape) - } - if not_matching_shapes: - non_matching_shapes_str_list = [ - f"{name}: shape={have_shape} expected_shape={expected_shape}" - for name, (have_shape, expected_shape) in not_matching_shapes.items() - ] - raise PyTritonBadParameterError( - f"Could not use {', '.join(non_matching_shapes_str_list)} " - f"defaults as they have different than input signature shapes" - ) - - @wrapt.decorator - def _wrapper(wrapped, instance, args, kwargs): - model_config = get_model_config(wrapped, instance) - _verify_defaults(model_config) - # verification if not after group wrappers is in group wrappers - - (requests,) = args - - model_supports_batching = model_config.batching - for request in requests: - batch_size = get_inference_request_batch_size(request) if model_supports_batching else None - for default_key, default_value in defaults.items(): - if default_key in request: - continue - - if model_supports_batching: - ones_reps = (1,) * default_value.ndim # repeat once default_value on each axis - axis_reps = (batch_size,) + ones_reps # ... except on batch axis. we repeat it batch_size times - default_value = np.tile(default_value, axis_reps) - - request[default_key] = default_value - return wrapped(*args, **kwargs) - - return _wrapper - - -@wrapt.decorator -def triton_context(wrapped, instance, args, kwargs): - """Adds triton context. - - It gives you additional argument passed to the function in **kwargs called 'triton_context'. - You can read model config from it and in the future possibly have some interaction with triton. - """ - kwargs[TRITON_CONTEXT_FIELD_NAME] = get_triton_context(wrapped, instance) - return wrapped(*args, **kwargs) - - -@wrapt.decorator -def pad_batch(wrapped, instance, args, kwargs): - """Add padding to the inputs batches. 
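Before the `pad_batch` docstring continues, a short sketch of the `@fill_optionals` usage described above; the optional input name `temperature`, its dtype, and the other model input `sequences` are assumptions, and the defaults must match the model's declared input signature.

```python
import numpy as np
from pytriton.decorators import batch, fill_optionals

@fill_optionals(temperature=np.array([0.7], dtype=np.float32))
@batch
def infer_fn(sequences, temperature):
    # Requests that omitted "temperature" receive the default, tiled to the
    # request's batch size, so its leading dimension always matches sequences.
    assert temperature.shape[0] == sequences.shape[0]
    return {"outputs": sequences}
```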
- - Decorator appends last rows to the inputs multiple times to get desired batch size (preferred batch size or - max batch size from model config whatever is closer to current input size). - """ - inputs = {k: v for k, v in kwargs.items() if k != "__triton_context__"} - first_input = next(iter(inputs.values())) - config = get_model_config(wrapped, instance) - batch_sizes = ( - [] - if (config.batcher is None or config.batcher.preferred_batch_size is None) - else sorted(config.batcher.preferred_batch_size) - ) - batch_sizes.append(config.max_batch_size) - batch_size = batch_sizes[bisect_left(batch_sizes, first_input.shape[0])] - - new_inputs = { - input_name: np.repeat( - input_array, - np.concatenate( - [np.ones(input_array.shape[0] - 1), np.array([batch_size - input_array.shape[0] + 1])] - ).astype(np.int64), - axis=0, - ) - for input_name, input_array in inputs.items() - } - - kwargs.update(new_inputs) - return wrapped(*args, **kwargs) - - -_SPECIAL_KEYS = ["__triton_context__"] - - -def first_value(*keys: str, squeeze_single_values=True, strict: bool = True): - """This decorator overwrites selected inputs with first element of the given input. - - It can be used in two ways: - - 1. Wrapping a single request inference callable by chaining with @batch decorator: - @batch - @first_value("temperature") - def infer_fn(**inputs): - ... - return result - - 2. Wrapping a multiple requests inference callable: - @first_value("temperature") - def infer_fn(requests): - ... - return results - - By default, the decorator squeezes single value arrays to scalars. - This behavior can be disabled by setting the `squeeze_single_values` flag to False. - - By default, the decorator checks the equality of the values on selected values. - This behavior can be disabled by setting the `strict` flag to False. - - Wrapper can only be used with models that support batching. - - Args: - keys: The input keys selected for conversion. - squeeze_single_values: squeeze single value ND array to scalar values. Defaults to True. - strict: enable checking if all values on single selected input of request are equal. Defaults to True. - - Raises: - PyTritonRuntimeError: if not all values on a single selected input of the request are equal - and the strict flag is set to True. Additionally, if the decorator is used with a model that doesn't support batching, - PyTritonBadParameterError: if any of the keys passed to the decorator are not allowed. - """ - if any(k in _SPECIAL_KEYS for k in keys): - not_allowed_keys = [key for key in keys if key in _SPECIAL_KEYS] - raise PyTritonBadParameterError( - f"The keys {', '.join(not_allowed_keys)} are not allowed as keys for @first_value wrapper. " - f"The set of not allowed keys are {', '.join(_SPECIAL_KEYS)}" - ) - - @wrapt.decorator - def wrapper(wrapped, instance, args, kwargs): - model_config = get_model_config(wrapped, instance) - if not model_config.batching: - raise PyTritonRuntimeError("The @first_value decorator can only be used with models that support batching.") - - def _replace_inputs_with_first_value(_request): - for input_name in keys: - if input_name not in _request: - continue - - values = _request[input_name] - if strict: - # do not set axis for arrays with strings (object) or models not supporting batching - axis_of_uniqueness = None if values.dtype == object else 0 - unique_values = np.unique(values, axis=axis_of_uniqueness) - if len(unique_values) > 1: - raise PyTritonRuntimeError( - f"The values on the {input_name!r} input are not equal. 
" - "To proceed, either disable strict mode in @first_value wrapper " - "or ensure that the values always are consistent. " - f"The current values of {input_name!r} are {_request[input_name]!r}." - ) - - _first_value = values[0] - if ( - squeeze_single_values - and not np.isscalar(_first_value) - and all(dim == 1 for dim in _first_value.shape) - ): - _dim_0_array = np.squeeze(_first_value) - _first_value = _dim_0_array[()] # obtain scalar from 0-dim array with numpy type - - _request[input_name] = _first_value - return _request - - inputs_names = set(kwargs) - set(_SPECIAL_KEYS) - if inputs_names: - kwargs = _replace_inputs_with_first_value(kwargs) - return wrapped(*args, **kwargs) - else: - requests, *other_args = args - requests = [_replace_inputs_with_first_value(request) for request in requests] - return wrapped(requests, *other_args, **kwargs) - - return wrapper diff --git a/stf/stf-api-alternative/pytriton/pytriton/exceptions.py b/stf/stf-api-alternative/pytriton/pytriton/exceptions.py deleted file mode 100644 index 7bcaff50ac46a10449bf70b27bb69d909279197f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/exceptions.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""PyTriton exceptions definition.""" - - -class PyTritonError(Exception): - """Generic PyTriton exception.""" - - def __init__(self, message: str): - """Initialize exception with message. - - Args: - message: Error message - """ - self._message = message - - def __str__(self) -> str: - """Return exception as a string. - - Returns: - Message content - """ - return self._message - - @property - def message(self): - """Get the exception message. - - Returns: - The message associated with this exception, or None if no message. - - """ - return self._message - - -class PyTritonValidationError(PyTritonError): - """PyTriton configuration validation exception.""" - - pass - - -class PyTritonInvalidOperationError(PyTritonError): - """PyTriton invalid operation exception.""" - - pass - - -class PyTritonBadParameterError(PyTritonError): - """PyTriton invalid parameter exception.""" - - pass - - -class PyTritonModelConfigError(PyTritonError): - """PyTriton invalid model config exception.""" - - pass - - -class PyTritonUnrecoverableError(PyTritonError): - """Unrecoverable error occurred in inference callable, thus no further inferences possible.""" - - pass - - -class PyTritonRuntimeError(PyTritonError): - """Raised when an error is detected that doesn’t fall in any of the other categories.""" - - pass diff --git a/stf/stf-api-alternative/pytriton/pytriton/model_config/__init__.py b/stf/stf-api-alternative/pytriton/pytriton/model_config/__init__.py deleted file mode 100644 index 9698bf59ee712a76ff439a991a2089f2c4edeac8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/model_config/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. 
All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 -from .common import DeviceKind, DynamicBatcher, QueuePolicy, TimeoutAction # noqa: F401 -from .model_config import ModelConfig # noqa: F401 -from .tensor import Tensor # noqa: F401 diff --git a/stf/stf-api-alternative/pytriton/pytriton/model_config/common.py b/stf/stf-api-alternative/pytriton/pytriton/model_config/common.py deleted file mode 100644 index 8976ed860478af21b4c05402a730b72125482515..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/model_config/common.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Common structures for internal and external ModelConfig.""" -import dataclasses -import enum -from typing import Dict, Optional - - -class DeviceKind(enum.Enum): - """Device kind for model deployment. - - Args: - KIND_AUTO: Automatically select the device for model deployment. - KIND_CPU: Model is deployed on CPU. - KIND_GPU: Model is deployed on GPU. - """ - - KIND_AUTO = "KIND_AUTO" - KIND_CPU = "KIND_CPU" - KIND_GPU = "KIND_GPU" - - -class TimeoutAction(enum.Enum): - """Timeout action definition for timeout_action QueuePolicy field. - - Args: - REJECT: Reject the request and return error message accordingly. - DELAY: Delay the request until all other requests at the same (or higher) priority levels - that have not reached their timeouts are processed. - """ - - REJECT = "REJECT" - DELAY = "DELAY" - - -@dataclasses.dataclass -class QueuePolicy: - """Model queue policy configuration. - - More in Triton Inference Server [documentation] - [documentation]: https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto#L1037 - - Args: - timeout_action: The action applied to timed-out request. - default_timeout_microseconds: The default timeout for every request, in microseconds. - allow_timeout_override: Whether individual request can override the default timeout value. - max_queue_size: The maximum queue size for holding requests. - """ - - timeout_action: TimeoutAction = TimeoutAction.REJECT - default_timeout_microseconds: int = 0 - allow_timeout_override: bool = False - max_queue_size: int = 0 - - -@dataclasses.dataclass -class DynamicBatcher: - """Dynamic batcher configuration. 
- - More in Triton Inference Server [documentation] - [documentation]: https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto#L1104 - - Args: - max_queue_delay_microseconds: The maximum time, in microseconds, a request will be delayed in - the scheduling queue to wait for additional requests for batching. - preferred_batch_size: Preferred batch sizes for dynamic batching. - preserve_ordering : Should the dynamic batcher preserve the ordering of responses to - match the order of requests received by the scheduler. - priority_levels: The number of priority levels to be enabled for the model. - default_priority_level: The priority level used for requests that don't specify their priority. - default_queue_policy: The default queue policy used for requests. - priority_queue_policy: Specify the queue policy for the priority level. - """ - - max_queue_delay_microseconds: int = 0 - preferred_batch_size: Optional[list] = None - preserve_ordering: bool = False - priority_levels: int = 0 - default_priority_level: int = 0 - default_queue_policy: Optional[QueuePolicy] = None - priority_queue_policy: Optional[Dict[int, QueuePolicy]] = None diff --git a/stf/stf-api-alternative/pytriton/pytriton/model_config/generator.py b/stf/stf-api-alternative/pytriton/pytriton/model_config/generator.py deleted file mode 100644 index 000e6521b6b30c5ba49f4286450c45606bfda6d0..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/model_config/generator.py +++ /dev/null @@ -1,285 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Generator class for creating Triton model config. - -The class consume the TritonModelConfig object as a constructor argument and produce the Triton model config in form of -dict or file. - - Typical usage example: - - model_config = TritonModelConfig(model_name="simple") - generator = ModelConfigGenerator(model_config) - generator.to_file("/path/to/config.pbtxt") -""" -import json -import logging -import pathlib -from typing import Dict, Union - -import numpy as np -from google.protobuf import json_format, text_format # pytype: disable=pyi-error - -from pytriton.exceptions import PyTritonBadParameterError - -from .triton_model_config import DynamicBatcher, TensorSpec, TritonModelConfig - -try: - import tritonclient.grpc as grpc_client - from tritonclient import utils as client_utils # noqa: F401 -except ImportError: - try: - import tritonclientutils as client_utils # noqa: F401 - import tritongrpcclient as grpc_client - except ImportError: - client_utils = None - grpc_client = None - -LOGGER = logging.getLogger(__name__) - - -class ModelConfigGenerator: - """Generate the protobuf config from ModelConfig object.""" - - def __init__(self, config: TritonModelConfig): - """Initialize generator. 
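An illustrative configuration sketch for the two dataclasses above (the values are arbitrary examples, not defaults from the deleted sources):

    from pytriton.model_config import DynamicBatcher, QueuePolicy, TimeoutAction

    # Wait up to 100 us to gather a preferred batch; delay timed-out requests
    # instead of rejecting them.
    batcher = DynamicBatcher(
        max_queue_delay_microseconds=100,
        preferred_batch_size=[4, 8],
        default_queue_policy=QueuePolicy(
            timeout_action=TimeoutAction.DELAY,
            default_timeout_microseconds=1_000_000,
            allow_timeout_override=True,
        ),
    )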
- - Args: - config: model config object - """ - self._config = config - - def to_file(self, config_path: Union[str, pathlib.Path]) -> str: - """Serialize ModelConfig to prototxt and save to config_path directory. - - Args: - config_path: path to configuration file - - Returns: - A string with generated model configuration - """ - from tritonclient.grpc import model_config_pb2 # pytype: disable=import-error - - # https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto - model_config = self.get_config() - LOGGER.debug(f"Generated Triton config:\n{json.dumps(model_config, indent=4)}") - - config_payload = json_format.ParseDict(model_config, model_config_pb2.ModelConfig()) - LOGGER.debug(f"Generated Triton config payload:\n{config_payload}") - - config_path = pathlib.Path(config_path) - config_path.parent.mkdir(parents=True, exist_ok=True) - - model_config_bytes = text_format.MessageToBytes(config_payload) - - # WAR: triton requires max_batch_size = 0 to be explicit written - # while this is not stored in payload during MessageToBytes - if model_config["max_batch_size"] == 0: - model_config_bytes += b"max_batch_size: 0\n" - - with config_path.open("wb") as cfg: - cfg.write(model_config_bytes) - - LOGGER.debug(f"Generated config stored in {config_path}") - - return config_payload - - def get_config(self) -> Dict: - """Create a Triton model config from ModelConfig object. - - Returns: - Dict with model configuration data - """ - model_config = {"name": self._config.model_name, "backend": self._config.backend} - self._set_batching(model_config) - self._set_model_signature(model_config) - self._set_instance_group(model_config) - self._set_model_transaction_policy(model_config) - self._set_backend_parameters(model_config) - self._set_response_cache(model_config) - return model_config - - def _set_batching(self, model_config: Dict) -> None: - """Configure batching for model deployment on Triton Inference Server. - - Args: - model_config: Dict with model config for Triton Inference Server - """ - if not self._config.batching: - model_config["max_batch_size"] = 0 - LOGGER.debug("Batching for model is disabled. The `max_batch_size` field value set to 0.") - return - elif self._config.max_batch_size < 1: - raise PyTritonBadParameterError("The `max_batch_size` must be greater or equal to 1.") - - model_config["max_batch_size"] = self._config.max_batch_size - if isinstance(self._config.batcher, DynamicBatcher): - dynamic_batching_config = {} - if self._config.batcher.max_queue_delay_microseconds > 0: - dynamic_batching_config["maxQueueDelayMicroseconds"] = int( - self._config.batcher.max_queue_delay_microseconds - ) - - if self._config.batcher.preferred_batch_size: - dynamic_batching_config["preferredBatchSize"] = [ - int(bs) for bs in self._config.batcher.preferred_batch_size - ] - - if self._config.batcher.preserve_ordering: - dynamic_batching_config["preserveOrdering"] = self._config.batcher.preserve_ordering - - if self._config.batcher.priority_levels: - dynamic_batching_config["priorityLevels"] = self._config.batcher.priority_levels - - if self._config.batcher.default_priority_level: - if self._config.batcher.default_priority_level > self._config.batcher.priority_levels: - raise PyTritonBadParameterError( - "The `default_priority_level` must be between 1 and " f"{self._config.batcher.priority_levels}." 
- ) - dynamic_batching_config["defaultPriorityLevel"] = self._config.batcher.default_priority_level - - if self._config.batcher.default_queue_policy: - priority_queue_policy_config = { - "timeoutAction": self._config.batcher.default_queue_policy.timeout_action.value, - "defaultTimeoutMicroseconds": int( - self._config.batcher.default_queue_policy.default_timeout_microseconds - ), - "allowTimeoutOverride": self._config.batcher.default_queue_policy.allow_timeout_override, - "maxQueueSize": int(self._config.batcher.default_queue_policy.max_queue_size), - } - dynamic_batching_config["defaultQueuePolicy"] = priority_queue_policy_config - - if self._config.batcher.priority_queue_policy: - if not self._config.batcher.priority_levels: - raise PyTritonBadParameterError( - "Provide the `priority_levels` if you want to define `priority_queue_policy` " - "for Dynamic Batching." - ) - - priority_queue_policy_config = {} - for priority, queue_policy in self._config.batcher.priority_queue_policy.items(): - if priority < 0 or priority > self._config.batcher.priority_levels: - raise PyTritonBadParameterError( - f"Invalid `priority`={priority} provided. The value must be between " - f"1 and {self._config.batcher.priority_levels}." - ) - - priority_queue_policy_config[priority] = { - "timeoutAction": queue_policy.timeout_action.value, - "defaultTimeoutMicroseconds": int(queue_policy.default_timeout_microseconds), - "allowTimeoutOverride": queue_policy.allow_timeout_override, - "maxQueueSize": int(queue_policy.max_queue_size), - } - - dynamic_batching_config["priorityQueuePolicy"] = priority_queue_policy_config - - model_config["dynamic_batching"] = dynamic_batching_config - else: - LOGGER.debug("Default batching used") - - def _set_instance_group(self, model_config: Dict) -> None: - """Configure instance group for model deployment on Triton Inference Server. - - Args: - model_config: Dict with model config for Triton Inference Server - """ - instance_groups = [] - for device_kind, count in self._config.instance_group.items(): - instance_groups.append( - { - "count": count, - "kind": device_kind.value, - } - ) - - if instance_groups: - model_config["instance_group"] = instance_groups - - def _set_model_transaction_policy(self, model_config: Dict) -> None: - """Configure model transaction policy for model deployment on Triton Inference Server. - - Args: - model_config: Dict with model config for Triton Inference Server - """ - if self._config.decoupled: - model_config["model_transaction_policy"] = {"decoupled": True} - - def _set_backend_parameters(self, model_config: Dict) -> None: - """Configure backend parameters for model deployment on Triton Inference Server. - - Args: - model_config: Dict with model config for Triton Inference Server - """ - parameters = {} - for key, value in self._config.backend_parameters.items(): - parameters[key] = { - "string_value": str(value), - } - - if parameters: - model_config["parameters"] = parameters - - def _set_model_signature(self, model_config: Dict) -> None: - """Configure model signature for model deployment on Triton Inference Server. 
- - Args: - model_config: Dict with model config for Triton Inference Server - - """ - - def _rewrite_io_spec(spec_: TensorSpec) -> Dict: - if spec_.dtype in [np.object_, object, bytes, np.bytes_]: - dtype = "TYPE_STRING" - else: - # pytype: disable=attribute-error - dtype = spec_.dtype().dtype - # pytype: enable=attribute-error - dtype = f"TYPE_{client_utils.np_to_triton_dtype(dtype)}" - - dims = spec_.shape - - item = { - "name": spec_.name, - "dims": list(dims), - "data_type": dtype, - } - - if spec_.optional: - item["optional"] = True - - return item - - if self._config.inputs: - model_config["input"] = [_rewrite_io_spec(spec) for spec in self._config.inputs] - - if self._config.outputs: - outputs = [_rewrite_io_spec(spec) for spec in self._config.outputs] - if outputs: - optional_outputs = [o for o in outputs if o.get("optional")] - if optional_outputs: - raise PyTritonBadParameterError( - "Optional flag for outputs is not supported. " - f"Outputs marked as optional: {', '.join([o['name'] for o in optional_outputs])}." - ) - model_config["output"] = outputs - - def _set_response_cache(self, model_config: Dict): - """Configure response cache for model. - - Args: - model_config: Dictionary where configuration is attached. - """ - if self._config.response_cache: - model_config["response_cache"] = { - "enable": self._config.response_cache.enable, - } diff --git a/stf/stf-api-alternative/pytriton/pytriton/model_config/model_config.py b/stf/stf-api-alternative/pytriton/pytriton/model_config/model_config.py deleted file mode 100644 index 7d35a363927928be502f0f065f88303a52b79fc9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/model_config/model_config.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Model configurations. - -Dataclasses with specialized deployment paths for models on Triton. The purpose of this module is to provide clear options -to configure models of given types. - -The dataclasses are exposed in the user API. -""" -import dataclasses - -from pytriton.model_config import DynamicBatcher - - -@dataclasses.dataclass -class ModelConfig: - """Additional model configuration for running model through Triton Inference Server. - - Args: - batching: Flag to enable/disable batching for model. - max_batch_size: The maximal batch size that would be handled by model. - batcher: Configuration of Dynamic Batching for the model. 
- response_cache: Flag to enable/disable response cache for the model - decoupled: Flag to enable/disable decoupled from requests execution - """ - - batching: bool = True - max_batch_size: int = 4 - batcher: DynamicBatcher = dataclasses.field(default_factory=DynamicBatcher) - response_cache: bool = False - decoupled: bool = False diff --git a/stf/stf-api-alternative/pytriton/pytriton/model_config/parser.py b/stf/stf-api-alternative/pytriton/pytriton/model_config/parser.py deleted file mode 100644 index f26606604ad14d5c8e5bb190550c6430fa9d4acb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/model_config/parser.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""ModelConfigParser class definition. - -Provide functionality to parse the Triton model configuration stored in file or form of dictionary into the object of -class ModelConfig. - - Examples of use: - - # Parse from dict - model_config = ModelConfigParser.from_dict(model_config_dict) - - # Parse from file - model_config = ModelConfigParser.from_file("/path/to/config.pbtxt") - -""" -import json -import logging -import pathlib -from typing import Dict - -import numpy as np -from google.protobuf import json_format, text_format # pytype: disable=pyi-error - -from pytriton.exceptions import PyTritonModelConfigError - -from .common import QueuePolicy, TimeoutAction -from .triton_model_config import DeviceKind, DynamicBatcher, ResponseCache, TensorSpec, TritonModelConfig - -try: - import tritonclient.grpc as grpc_client - from tritonclient import utils as client_utils # noqa: F401 -except ImportError: - try: - import tritonclientutils as client_utils # noqa: F401 - import tritongrpcclient as grpc_client - except ImportError: - client_utils = None - grpc_client = None - -LOGGER = logging.getLogger(__name__) - - -class ModelConfigParser: - """Provide functionality to parse dictionary or file to ModelConfig object.""" - - @classmethod - def from_dict(cls, model_config_dict: Dict) -> TritonModelConfig: - """Create ModelConfig from configuration stored in dictionary. 
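A short sketch showing how the `ModelConfig` dataclass defined above is typically filled in (an assumed example; the values are not prescribed by the deleted code):

    from pytriton.model_config import DynamicBatcher, ModelConfig

    model_config = ModelConfig(
        batching=True,
        max_batch_size=16,
        batcher=DynamicBatcher(max_queue_delay_microseconds=100),
        response_cache=False,
        decoupled=False,
    )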
- - Args: - model_config_dict: Dictionary with model config - - Returns: - A ModelConfig object with data parsed from the dictionary - """ - LOGGER.debug(f"Parsing Triton config model from dict: \n{json.dumps(model_config_dict, indent=4)}") - - if model_config_dict.get("max_batch_size", 0) > 0: - batching = True - else: - batching = False - - dynamic_batcher_config = model_config_dict.get("dynamic_batching") - if dynamic_batcher_config is not None: - batcher = cls._parse_dynamic_batching(dynamic_batcher_config) - else: - batcher = None - - instance_group = { - DeviceKind(entry["kind"]): entry.get("count") for entry in model_config_dict.get("instance_group", []) - } - - decoupled = model_config_dict.get("model_transaction_policy", {}).get("decoupled", False) - - backend_parameters_config = model_config_dict.get("parameters", []) - if isinstance(backend_parameters_config, list): - # If the backend_parameters_config is a list of strings, use them as keys with empty values - LOGGER.debug(f"backend_parameters_config is a list of strings: {backend_parameters_config}") - backend_parameters = {name: "" for name in backend_parameters_config} - elif isinstance(backend_parameters_config, dict): - # If the backend_parameters_config is a dictionary, use the key and "string_value" fields as key-value pairs - LOGGER.debug(f"backend_parameters_config is a dictionary: {backend_parameters_config}") - backend_parameters = { - name: backend_parameters_config[name]["string_value"] for name in backend_parameters_config - } - else: - # Otherwise, raise an error - LOGGER.error( - f"Invalid type {type(backend_parameters_config)} for backend_parameters_config: {backend_parameters_config}" - ) - raise TypeError(f"Invalid type for backend_parameters_config: {type(backend_parameters_config)}") - - inputs = [ - cls.rewrite_io_spec(item, "input", idx) for idx, item in enumerate(model_config_dict.get("input", [])) - ] or None - outputs = [ - cls.rewrite_io_spec(item, "output", idx) for idx, item in enumerate(model_config_dict.get("output", [])) - ] or None - - response_cache_config = model_config_dict.get("response_cache") - if response_cache_config: - response_cache = cls._parse_response_cache(response_cache_config) - else: - response_cache = None - - return TritonModelConfig( - model_name=model_config_dict["name"], - batching=batching, - max_batch_size=model_config_dict.get("max_batch_size", 0), - batcher=batcher, - inputs=inputs, - outputs=outputs, - instance_group=instance_group, - decoupled=decoupled, - backend_parameters=backend_parameters, - response_cache=response_cache, - ) - - @classmethod - def from_file(cls, *, config_path: pathlib.Path) -> TritonModelConfig: - """Create ModelConfig from configuration stored in file. - - Args: - config_path: location of file with model config - - Returns: - A ModelConfig object with data parsed from the file - """ - from tritonclient.grpc import model_config_pb2 # pytype: disable=import-error - - LOGGER.debug(f"Parsing Triton config model config_path={config_path}") - - with config_path.open("r") as config_file: - payload = config_file.read() - model_config_proto = text_format.Parse(payload, model_config_pb2.ModelConfig()) - - model_config_dict = json_format.MessageToDict(model_config_proto, preserving_proto_field_name=True) - return ModelConfigParser.from_dict(model_config_dict=model_config_dict) - - @classmethod - def rewrite_io_spec(cls, item: Dict, io_type: str, idx: int) -> TensorSpec: - """Rewrite the IO Spec provided in form of dictionary to TensorSpec. 
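A hedged round-trip sketch combining `ModelConfigGenerator.to_file` (defined earlier) with `ModelConfigParser.from_file` (defined above); it assumes `tritonclient` is installed, since both methods import the protobuf definitions from it, and the output path is a placeholder:

    import pathlib
    from pytriton.model_config.generator import ModelConfigGenerator
    from pytriton.model_config.parser import ModelConfigParser
    from pytriton.model_config.triton_model_config import TritonModelConfig

    config = TritonModelConfig(model_name="simple")
    ModelConfigGenerator(config).to_file("/tmp/simple/config.pbtxt")

    parsed = ModelConfigParser.from_file(config_path=pathlib.Path("/tmp/simple/config.pbtxt"))
    assert parsed.model_name == "simple"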
- - Args: - item: IO data for input - io_type: Type of the IO (input or output) - idx: Index of IO - - Returns: - TensorSpec with input or output data - """ - name = item.get("name") - if not name: - raise PyTritonModelConfigError(f"Name for {io_type} at index {idx} not provided.") - - data_type = item.get("data_type") - if not data_type: - raise PyTritonModelConfigError(f"Data type for {io_type} with name `{name}` not defined.") - - data_type_val = data_type.split("_") - if len(data_type_val) != 2: - raise PyTritonModelConfigError( - f"Invalid data type `{data_type}` for {io_type} with name `{name}` not defined. " - "The expected name is TYPE_{type}." - ) - - data_type = data_type_val[1] - if data_type == "STRING": - dtype = np.bytes_ - else: - dtype = client_utils.triton_to_np_dtype(data_type) - if dtype is None: - raise PyTritonModelConfigError(f"Unsupported data type `{data_type}` for {io_type} with name `{name}`") - - dtype = np.dtype("bool") if dtype == bool else dtype - - dims = item.get("dims", []) - if not dims: - raise PyTritonModelConfigError(f"Dimension for {io_type} with name `{name}` not defined.") - - shape = tuple(int(s) for s in dims) - - optional = item.get("optional", False) - return TensorSpec(name=item["name"], shape=shape, dtype=dtype, optional=optional) - - @classmethod - def _parse_dynamic_batching(cls, dynamic_batching_config: Dict) -> DynamicBatcher: - """Parse config to create DynamicBatcher object. - - Args: - dynamic_batching_config: Configuration of dynamic batcher from config - - Returns: - DynamicBatcher object with configuration - """ - default_queue_policy = None - default_queue_policy_config = dynamic_batching_config.get("default_queue_policy") - if default_queue_policy_config: - default_queue_policy = QueuePolicy( - timeout_action=TimeoutAction( - default_queue_policy_config.get("timeout_action", TimeoutAction.REJECT.value) - ), - default_timeout_microseconds=int(default_queue_policy_config.get("default_timeout_microseconds", 0)), - allow_timeout_override=bool(default_queue_policy_config.get("allow_timeout_override", False)), - max_queue_size=int(default_queue_policy_config.get("max_queue_size", 0)), - ) - - priority_queue_policy = None - priority_queue_policy_config = dynamic_batching_config.get("priority_queue_policy") - if priority_queue_policy_config: - priority_queue_policy = {} - for priority, queue_policy_config in priority_queue_policy_config.items(): - queue_policy = QueuePolicy( - timeout_action=TimeoutAction(queue_policy_config.get("timeout_action", TimeoutAction.REJECT.value)), - default_timeout_microseconds=int(queue_policy_config.get("default_timeout_microseconds", 0)), - allow_timeout_override=bool(queue_policy_config.get("allow_timeout_override", False)), - max_queue_size=int(queue_policy_config.get("max_queue_size", 0)), - ) - priority_queue_policy[int(priority)] = queue_policy - - batcher = DynamicBatcher( - preferred_batch_size=dynamic_batching_config.get("preferred_batch_size"), - max_queue_delay_microseconds=int(dynamic_batching_config.get("max_queue_delay_microseconds", 0)), - preserve_ordering=bool(dynamic_batching_config.get("preserve_ordering", False)), - priority_levels=int(dynamic_batching_config.get("priority_levels", 0)), - default_priority_level=int(dynamic_batching_config.get("default_priority_level", 0)), - default_queue_policy=default_queue_policy, - priority_queue_policy=priority_queue_policy, - ) - return batcher - - @classmethod - def _parse_response_cache(cls, response_cache_config: Dict) -> ResponseCache: - """Parse 
config for response cache. - - Args: - response_cache_config: response cache configuration - - Returns: - ResponseCache object with configuration - """ - response_cache = ResponseCache( - enable=bool(response_cache_config["enable"]), - ) - return response_cache diff --git a/stf/stf-api-alternative/pytriton/pytriton/model_config/tensor.py b/stf/stf-api-alternative/pytriton/pytriton/model_config/tensor.py deleted file mode 100644 index 909466223c172ce44460816178ea37fc7f4b1341..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/model_config/tensor.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tensor object definition. - -Describe the model input or output. - - Examples of use: - - # Minimal constructors - tensor = Tensor(dtype=np.bytes_, shape=(-1,)) - tensor = Tensor(dtype=np.float32, shape=(-1,)) - - # Type definition from existing object - a = np.array([1, 2, 3, 4]) - tensor = Tensor(dtype=a.dtype, shape=(-1,)) - - # Custom name - tensor = Tensor(name="data", dtype=np.float32, shape=(16,)) -""" -import dataclasses -from typing import Optional, Type, Union - -import numpy as np - - -@dataclasses.dataclass(frozen=True) -class Tensor: - """Model input and output definition for Triton deployment. - - Args: - shape: Shape of the input/output tensor. - dtype: Data type of the input/output tensor. - name: Name of the input/output of model. - optional: Flag to mark if input is optional. - """ - - shape: tuple - dtype: Union[np.dtype, Type[np.dtype], Type[object]] - name: Optional[str] = None - optional: Optional[bool] = False - - def __post_init__(self): - """Override object values on post init or field override.""" - if isinstance(self.dtype, np.dtype): - object.__setattr__(self, "dtype", self.dtype.type) # pytype: disable=attribute-error diff --git a/stf/stf-api-alternative/pytriton/pytriton/model_config/triton_model_config.py b/stf/stf-api-alternative/pytriton/pytriton/model_config/triton_model_config.py deleted file mode 100644 index fc0ba5ec7154ad66b003eb48c60ef3698bb9e8ef..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/model_config/triton_model_config.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
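The `Tensor` dataclass above normalizes a `np.dtype` instance to its scalar type in `__post_init__`, so both spellings below end up equivalent. A small illustrative check (the tensor name is arbitrary):

    import numpy as np
    from pytriton.model_config import Tensor

    t1 = Tensor(name="logits", dtype=np.dtype("float32"), shape=(-1,))
    t2 = Tensor(name="logits", dtype=np.float32, shape=(-1,))
    assert t1.dtype is t2.dtype  # both are np.float32 after normalization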
-"""ModelConfig related objects.""" -import dataclasses -from typing import Dict, Optional, Sequence, Type, Union - -import numpy as np - -from .common import DeviceKind, DynamicBatcher - - -@dataclasses.dataclass -class ResponseCache: - """Model response cache configuration. - - More in Triton Inference Server [documentation] - [documentation]: https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto#L1765 - """ - - enable: bool - - -@dataclasses.dataclass -class TensorSpec: - """Stores specification of single tensor. This includes name, shape and dtype.""" - - name: str - shape: tuple - dtype: Union[Type[np.dtype], Type[object]] - optional: Optional[bool] = False - - -@dataclasses.dataclass -class TritonModelConfig: - """Triton Model Config dataclass for simplification and specialization of protobuf config generation. - - More in Triton Inference Server [documentation] - [documentation]: https://github.com/triton-inference-server/common/blob/main/protobuf/model_config.proto - """ - - model_name: str - model_version: int = 1 - max_batch_size: int = 4 - batching: bool = True - batcher: Optional[DynamicBatcher] = None - instance_group: Dict[DeviceKind, Optional[int]] = dataclasses.field(default_factory=lambda: {}) - decoupled: bool = False - backend_parameters: Dict[str, str] = dataclasses.field(default_factory=lambda: {}) - inputs: Optional[Sequence[TensorSpec]] = None - outputs: Optional[Sequence[TensorSpec]] = None - response_cache: Optional[ResponseCache] = None - - @property - def backend(self) -> str: - """Return backend parameter.""" - return "python" diff --git a/stf/stf-api-alternative/pytriton/pytriton/models/__init__.py b/stf/stf-api-alternative/pytriton/pytriton/models/__init__.py deleted file mode 100644 index 8010bd32129eb99ce3ce66981b81d3ba41bf287b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/models/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/pytriton/models/manager.py b/stf/stf-api-alternative/pytriton/pytriton/models/manager.py deleted file mode 100644 index adb22b7b51c74be7dcb568c4224dc04e16acdba5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/models/manager.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""ModelManager class. - -The ModelManager is responsible for maintaining the models that has to be server on Triton Inference Server. - - Examples of use: - manager = ModelManager(model_repository) - manager.add_model(model) - - manager.create_models() -""" -import contextlib -import json -import logging -import socket -from typing import Dict, Iterable, Tuple - -from pytriton.client import ModelClient -from pytriton.client.utils import create_client_from_url, wait_for_server_ready -from pytriton.constants import CREATE_TRITON_CLIENT_TIMEOUT_S, DEFAULT_TRITON_STARTUP_TIMEOUT_S -from pytriton.exceptions import PyTritonInvalidOperationError -from pytriton.models.model import Model - -LOGGER = logging.getLogger(__name__) - - -class ModelManager: - """ModelManager class for maintaining Triton models.""" - - def __init__( - self, - triton_url: str, - ): - """Create ModelManager object. - - Args: - triton_url: Triton server URL - """ - self._triton_url = triton_url - self._models: Dict[Tuple[str, int], Model] = {} - - @property - def models(self) -> Iterable[Model]: - """List models added to manage. - - Returns: - List with models added to ModelManager. - """ - return self._models.values() - - def add_model(self, model: Model, load_model: bool = False) -> None: - """Add model to manage. - - Args: - model: Model instance - load_model: If True, model will be loaded to Triton server. - """ - key = self._format_key(model) - if key in self._models: - raise PyTritonInvalidOperationError("Cannot add model with the same name twice.") - - LOGGER.debug(f"Adding {model.model_name} ({model.model_version}) to registry under {key}.") - self._models[key] = model - - if load_model: - self._load_model(model) - - def load_models(self) -> None: - """Load bound models to Triton server.""" - for model in self._models.values(): - if not model.is_alive(): - self._load_model(model) - - def clean(self) -> None: - """Clean the model and internal registry.""" - with contextlib.closing( - create_client_from_url(self._triton_url, network_timeout_s=CREATE_TRITON_CLIENT_TIMEOUT_S) - ) as client: - server_live = False - try: - server_live = client.is_server_live() - # TimeoutError and ConnectionRefusedError are derived from OSError so they are redundant here - # OSError is raised from gevent/_socketcommon.py:590 sometimes, when server is not ready - except (socket.timeout, OSError): - pass - except Exception as ex: - LOGGER.error(f"Unexpected exception during server live check: {ex}") - raise ex - - for name, model in self._models.items(): - LOGGER.debug(f"Clean model {name}.") - model.clean() - if server_live: - client.unload_model(model.model_name) - - if server_live: - wait_for_server_ready(client, timeout_s=DEFAULT_TRITON_STARTUP_TIMEOUT_S) - - self._models.clear() - - def _format_key(self, model: Model) -> Tuple[str, int]: - key = (model.model_name.lower(), model.model_version) - return key - - def _load_model(self, model: Model): - """Prepare model config and required files dict and load model to Triton server.""" - LOGGER.debug(f"Crating model {model.model_name} with version {model.model_version}.") - model.setup() - config = json.dumps(model.get_model_config()) - files = model.get_proxy_model_files() - with ModelClient( - url=self._triton_url, model_name=model.model_name, model_version=str(model.model_version) - ) as client: - client.wait_for_server(timeout_s=DEFAULT_TRITON_STARTUP_TIMEOUT_S) - 
client.load_model(config=config, files=files) - LOGGER.debug("Done.") diff --git a/stf/stf-api-alternative/pytriton/pytriton/models/model.py b/stf/stf-api-alternative/pytriton/pytriton/models/model.py deleted file mode 100644 index 0d40241bf44d602e8a2f58522baaef8af59e3b52..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/models/model.py +++ /dev/null @@ -1,341 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Model base class.""" -import base64 -import copy -import enum -import json -import logging -import multiprocessing -import pathlib -import shutil -import threading -import typing -from typing import Callable, Optional, Sequence, Union - -import zmq - -from pytriton.decorators import TritonContext -from pytriton.exceptions import PyTritonValidationError -from pytriton.model_config.generator import ModelConfigGenerator -from pytriton.model_config.model_config import ModelConfig -from pytriton.model_config.tensor import Tensor -from pytriton.model_config.triton_model_config import DeviceKind, ResponseCache, TensorSpec, TritonModelConfig -from pytriton.proxy.inference_handler import InferenceHandler, InferenceHandlerEvent -from pytriton.proxy.validators import TritonResultsValidator -from pytriton.utils.workspace import Workspace - -LOGGER = logging.getLogger(__name__) - - -class ModelEvent(enum.Enum): - """Represents model event.""" - - RUNTIME_TERMINATING = "runtime-terminating" - RUNTIME_TERMINATED = "runtime-terminated" - - -ModelEventsHandler = typing.Callable[["Model", ModelEvent, typing.Optional[typing.Any]], None] - - -def _inject_triton_context(triton_context: TritonContext, model_callable: Callable) -> Callable: - """Inject triton context into callable. - - Args: - triton_context: Triton context - model_callable: Callable to inject triton context - - Returns: - Callable with injected triton context - """ - if hasattr(model_callable, "__self__"): - model_callable.__self__.__triton_context__ = triton_context - else: - model_callable.__triton_context__ = triton_context - return model_callable - - -class Model: - """Model definition.""" - - SCRIPT_FILES_TO_COPY = ["model.py", "communication.py", "types.py"] - - def __init__( - self, - model_name: str, - model_version: int, - inference_fn: Union[Callable, Sequence[Callable]], - inputs: Sequence[Tensor], - outputs: Sequence[Tensor], - config: ModelConfig, - workspace: Workspace, - triton_context: TritonContext, - strict: bool, - ): - """Create Python model with required data. - - Args: - model_name: Model name - model_version: Model version - inference_fn: Inference handler (function or lambda) - inputs: Model inputs definition - outputs: Model outputs definition - config: model configuration parameters - workspace: workspace for storing artifacts - strict: Enable strict validation of model outputs - - Raises: - PyTritonValidationError if one or more of provided values are incorrect. 
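A hedged usage sketch for `ModelManager` as defined above; note that the module docstring's `manager.create_models()` example does not correspond to a method on this class, while `load_models()` does. The URL is a placeholder and `model` stands for an already constructed `pytriton.models.model.Model` instance:

    from pytriton.models.manager import ModelManager

    manager = ModelManager(triton_url="grpc://localhost:8001")
    manager.add_model(model, load_model=False)  # register without loading yet
    manager.load_models()                       # load all registered models on Triton
    manager.clean()                             # unload models and clear the registry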
- """ - self.triton_context = triton_context - self.model_name = model_name - self.model_version = model_version - self._inference_handlers = [] - self.zmq_context = zmq.Context() - self._observers_lock = threading.Lock() - self._inference_handlers_lock = threading.Lock() - self._strict = strict - - self.infer_functions = [inference_fn] if isinstance(inference_fn, Callable) else inference_fn - if not isinstance(self.infer_functions, (Sequence, Callable)): - raise PyTritonValidationError("inference_fn has to be either callable or sequence of callables") - - self.inputs = inputs - self.outputs = outputs - - if any(output.optional for output in self.outputs): - raise PyTritonValidationError("Output tensors cannot be optional.") - - self.config = config - self._workspace = workspace - ipc_socket_path = self._workspace.path / f"ipc_proxy_backend_{model_name}" - self._shared_memory_socket = f"ipc://{ipc_socket_path.as_posix()}" - self._data_store_socket = self._workspace.path / "data_store.sock" - self._handshake_thread: Optional[threading.Thread] = None - self._shutdown_event = threading.Event() - self._triton_model_config: Optional[TritonModelConfig] = None - self._model_events_observers: typing.List[ModelEventsHandler] = [] - - def get_model_config(self) -> dict: - """Get model config. - - Returns: - Dictionary with model config - """ - triton_model_config = self._get_triton_model_config() - generator = ModelConfigGenerator(config=triton_model_config) - return generator.get_config() - - def get_proxy_model_files(self) -> typing.Dict[str, bytes]: - """Get proxy model files. - - Returns: - Dictionary with model files to be copied to Triton model store on server side: - key: file path in following format - 'file:{model_version}/{file_name}' - value: file content as bytes - """ - proxy_model_files_dict = {} - proxy_path = pathlib.Path(__file__).parent.parent / "proxy" - for file_to_copy in self.SCRIPT_FILES_TO_COPY: - src_file_path = proxy_path / file_to_copy - with open(src_file_path, "rb") as f: - src_file = f.read() - proxy_model_files_dict[f"file:{self.model_version}/{file_to_copy}"] = src_file - - return proxy_model_files_dict - - def generate_model(self, model_repository: pathlib.Path) -> None: - """Generate model and its config in the model repository. 
- - Args: - model_repository: Path to Triton model repository - - Raises: - OSError: when model repository not exists - """ - LOGGER.debug( - f"Generating model and config for {self.model_name} and {self.model_version} to {model_repository}" - ) - - model_catalog = model_repository / self.model_name - - config_file_path = model_catalog / "config.pbtxt" - if config_file_path.exists(): - LOGGER.warning(f"The config file {config_file_path} is going to be overridden.") - - triton_model_config = self._get_triton_model_config() - generator = ModelConfigGenerator(config=triton_model_config) - generator.to_file(config_file_path) - - model_version_catalog = model_catalog / str(self.model_version) - model_version_catalog.mkdir(exist_ok=True, parents=True) - - proxy_path = pathlib.Path(__file__).parent.parent / "proxy" - - for script_file in self.SCRIPT_FILES_TO_COPY: - src_file_path = proxy_path / script_file - dst_file_path = model_version_catalog / script_file - shutil.copy(src_file_path, dst_file_path) - - def setup(self) -> None: - """Create deployments and bindings to Triton Inference Server.""" - with self._inference_handlers_lock: - if not self._inference_handlers: - triton_model_config = self._get_triton_model_config() - validator = TritonResultsValidator(triton_model_config, self._strict) - for i, infer_function in enumerate(self.infer_functions): - self.triton_context.model_configs[infer_function] = copy.deepcopy(triton_model_config) - _inject_triton_context(self.triton_context, infer_function) - inference_handler = InferenceHandler( - model_callable=infer_function, - model_config=triton_model_config, - shared_memory_socket=f"{self._shared_memory_socket}_{i}", - data_store_socket=self._data_store_socket.as_posix(), - zmq_context=self.zmq_context, - validator=validator, - ) - inference_handler.on_proxy_backend_event(self._on_proxy_backend_event) - inference_handler.start() - self._inference_handlers.append(inference_handler) - self._handshake_thread = threading.Thread(target=self._model_proxy_handshake, daemon=True) - self._handshake_thread.start() - - def clean(self) -> None: - """Post unload actions to perform on model.""" - self._shutdown_event.set() - if self._handshake_thread is not None: - self._handshake_thread.join() - self._handshake_thread = None - - with self._observers_lock: - LOGGER.debug("Clearing model events observers") - self._model_events_observers.clear() - LOGGER.debug("Closing socket if needed") - if self.zmq_context is not None: - self.zmq_context.term() - LOGGER.debug("Socket closed. Waiting for proxy backend to shut down") - with self._inference_handlers_lock: - for inference_handler in self._inference_handlers: - inference_handler.stop() - LOGGER.debug("All backends ") - self._inference_handlers.clear() - - def is_alive(self) -> bool: - """Validate if model is working on Triton. - - If model is fully loaded by Triton, return True. Otherwise, perform a custom verification. - - Returns: - True if model is working, False otherwise - """ - with self._inference_handlers_lock: - if not self._inference_handlers: - return False - - for inference_handler in self._inference_handlers: - if not inference_handler.is_alive(): - return False - - return True - - def _get_triton_model_config(self) -> TritonModelConfig: - """Generate ModelConfig from descriptor and custom arguments for Python model. 
- - Returns: - ModelConfig object with configuration for Python model deployment - """ - if not self._triton_model_config: - triton_model_config = TritonModelConfig( - model_name=self.model_name, - model_version=self.model_version, - batching=self.config.batching, - batcher=self.config.batcher, - max_batch_size=self.config.max_batch_size, - decoupled=self.config.decoupled, - backend_parameters={"shared-memory-socket": self._shared_memory_socket}, - instance_group={DeviceKind.KIND_CPU: len(self.infer_functions)}, - ) - inputs = [] - for idx, input_spec in enumerate(self.inputs, start=1): - input_name = input_spec.name if input_spec.name else f"INPUT_{idx}" - tensor = TensorSpec( - name=input_name, dtype=input_spec.dtype, shape=input_spec.shape, optional=input_spec.optional - ) - inputs.append(tensor) - - outputs = [] - for idx, output_spec in enumerate(self.outputs, start=1): - output_name = output_spec.name if output_spec.name else f"OUTPUT_{idx}" - tensor = TensorSpec(name=output_name, dtype=output_spec.dtype, shape=output_spec.shape) - outputs.append(tensor) - - triton_model_config.inputs = inputs - triton_model_config.outputs = outputs - - if self.config.response_cache: - triton_model_config.response_cache = ResponseCache(enable=True) - - self._triton_model_config = triton_model_config - - return self._triton_model_config - - def _model_proxy_handshake(self) -> None: - socket = self.zmq_context.socket(zmq.REP) - socket.bind(self._shared_memory_socket) - try: - for i in range(len(self.infer_functions)): - while not self._shutdown_event.is_set(): - ready_to_read, _, _ = zmq.select([socket], [], [], 0.1) - if not ready_to_read: - continue - - socket.recv() - authkey = multiprocessing.current_process().authkey - instance_data = { - "shared-memory-socket": f"{self._shared_memory_socket}_{i}", - "data-store-socket": self._data_store_socket.as_posix(), - "auth-key": base64.b64encode(authkey).decode("utf-8"), - } - json_payload = json.dumps(instance_data) - socket.send_string(json_payload) - break - except Exception as exception: - LOGGER.error("Internal proxy backend error. It will be closed.") - LOGGER.exception(exception) - finally: - LOGGER.debug("Closing handshake socket") - socket_close_timeout_s = 0 - socket.close(linger=socket_close_timeout_s) - - def on_model_event(self, model_event_handle_fn: ModelEventsHandler): - """Register ModelEventsHandler callable. 
- - Args: - model_event_handle_fn: function to be called when model events arises - """ - with self._observers_lock: - self._model_events_observers.append(model_event_handle_fn) - - def _notify_model_events_observers(self, event: ModelEvent, context: typing.Any): - with self._observers_lock: - for model_event_handle_fn in self._model_events_observers: - model_event_handle_fn(self, event, context) - - def _on_proxy_backend_event( - self, proxy_backend: InferenceHandler, event: InferenceHandlerEvent, context: typing.Optional[typing.Any] = None - ): - if event == InferenceHandlerEvent.UNRECOVERABLE_ERROR: - self._notify_model_events_observers(ModelEvent.RUNTIME_TERMINATING, context) - elif event == InferenceHandlerEvent.FINISHED: - self._notify_model_events_observers(ModelEvent.RUNTIME_TERMINATED, context) diff --git a/stf/stf-api-alternative/pytriton/pytriton/proxy/__init__.py b/stf/stf-api-alternative/pytriton/pytriton/proxy/__init__.py deleted file mode 100644 index 8010bd32129eb99ce3ce66981b81d3ba41bf287b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/proxy/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/pytriton/proxy/communication.py b/stf/stf-api-alternative/pytriton/pytriton/proxy/communication.py deleted file mode 100644 index 2dda7c678e61984d77700bebf22c3fe4b20bb095..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/proxy/communication.py +++ /dev/null @@ -1,824 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Communication utility module. - -It is used for interaction between model and proxy_backend. 
-""" - -import atexit -import ctypes -import ctypes.util -import dataclasses -import fcntl -import gc -import json -import logging -import math -import multiprocessing.managers -import multiprocessing.popen_spawn_posix -import multiprocessing.shared_memory -import pathlib -import signal -import struct -import threading -import time -import uuid -import weakref -from typing import Dict, List, Literal, Optional, Sequence, Tuple, Union - -import numpy as np - -LOGGER = logging.getLogger(__name__) - - -# copy from -# https://github.com/triton-inference-server/python_backend/blob/main/src/resources/triton_python_backend_utils.py - - -def _serialize_byte_tensor(tensor) -> bytes: - """Serializes a bytes tensor into a flat numpy array of length prepended bytes. - - The numpy array should use dtype of np.object_. For np.bytes_, - numpy will remove trailing zeros at the end of byte sequence and because - of this it should be avoided. - - Args: - input_tensor: The bytes tensor to serialize. - - Returns: - serialized array as bytes buffer. - - Raises: - UnicodeEncodeErrors: raised when try to cast to string of non-bytes items fails - """ - if tensor.size == 0: - return b"" - - # If the input is a tensor of string/bytes objects, then must flatten those - # into a 1-dimensional array containing the 4-byte byte size followed by the - # actual element bytes. All elements are concatenated together in "C" order. - assert (tensor.dtype == np.object_) or (tensor.dtype.type == np.bytes_) - flattened_ls = [] - total_len = 0 - for obj in np.nditer(tensor, flags=["refs_ok"], order="C"): - # If directly passing bytes to BYTES type, - # don't convert it to str as Python will encode the - # bytes which may distort the meaning - if tensor.dtype == np.object_ and type(obj.item()) != bytes: - s = str(obj.item()).encode("utf-8") - else: - s = obj.item() - item_len = len(s) - flattened_ls.append(struct.pack(" np.ndarray: - """Deserializes an encoded bytes tensor into an numpy array of dtype of python objects. - - Args: - encoded_tensor : The encoded bytes tensor where each element has its length in - first 4 bytes followed by the content - dtype: The dtype of the numpy array to deserialize to. - order: The order of the numpy array to deserialize to. - - Returns: - The 1-D numpy array of type object containing the deserialized bytes in 'C' order. - """ - strs = [] - offset = 0 - val_buf = encoded_tensor - val_len = struct.unpack_from(" bytes: - header_format = _PARTIAL_HEADER_FORMAT + "Q" * len(shape) - dtype_descr = np.lib.format.dtype_to_descr(dtype) - assert ( - len(dtype_descr) <= _MAX_DTYPE_DESCR - ), f"dtype descr is too long; dtype_descr={dtype_descr} max={_MAX_DTYPE_DESCR}" - return struct.pack(header_format, dtype_descr.encode("utf-8"), order.encode("ascii"), len(shape), *shape) - - -def _unpack_header(header: bytes) -> Tuple[Tuple[int, ...], np.dtype, Literal["C", "F"]]: - shape_offset = struct.calcsize(_PARTIAL_HEADER_FORMAT) - dtype_descr, order, ndim = struct.unpack_from(_PARTIAL_HEADER_FORMAT, header, offset=0) - shape = struct.unpack_from("Q" * ndim, header, offset=shape_offset) - dtype = np.lib.format.descr_to_dtype(dtype_descr.decode("utf-8").rstrip("\x00")) - order = order.decode("ascii") - return shape, dtype, order - - -def serialize_numpy_with_struct_header(tensor: np.ndarray) -> List[Union[bytes, memoryview]]: - """Serialize numpy array to list of bytes and memoryviews. 
- - Args: - tensor: numpy array to serialize - - Returns: - List of data frames in form of bytes and memoryviews - """ - if tensor.dtype.hasobject: - data = _serialize_byte_tensor(tensor.ravel()) - order = "C" # as _serialize_byte_tensor returns C-ordered array - else: - if not tensor.data.contiguous: - tensor = np.ascontiguousarray(tensor) - data = tensor.data - order = "C" if tensor.flags.c_contiguous else "F" - - header = _pack_header(tensor.shape, tensor.dtype, order) - frames = [header, data] - return frames - - -def deserialize_numpy_with_struct_header(frames: List[Union[bytes, memoryview]]) -> np.ndarray: - """Deserialize numpy array from list of bytes and memoryviews. - - Args: - frames: List of data frames in form of bytes and memoryviews - - Returns: - numpy array - """ - header, data = frames - shape, dtype, order = _unpack_header(header) - if dtype.hasobject: - tensor = _deserialize_bytes_tensor(data, dtype).reshape(shape) - else: - tensor = np.ndarray(shape, dtype=dtype, buffer=data, order=order) - return tensor - - -def calc_serialized_size_of_numpy_with_struct_header(tensor: np.ndarray) -> List[int]: - """Calculate size of serialized numpy array. - - Args: - tensor: numpy array to serialize - - Returns: - List of sizes of data frames - """ - header_size = struct.calcsize(_PARTIAL_HEADER_FORMAT) + struct.calcsize("Q") * len(tensor.shape) - if tensor.dtype.hasobject: - items_sizes = [] - order = "C" if tensor.flags.c_contiguous else "F" - for obj in np.nditer(tensor, flags=["refs_ok"], order=order): - if tensor.dtype == np.object_ and type(obj.item()) != bytes: - s = str(obj.item()).encode("utf-8") - else: - s = obj.item() - items_sizes.append(len(s)) - - # total_size + for size of each item + each item - data_size = struct.calcsize(" "InferenceHandlerRequests": - """Reconstruct InferenceHandlerRequests object from bytes. - - Args: - content: bytes to parse - """ - requests = json.loads(content) - return cls( - requests=[ - MetaRequestResponse( - idx=request.get("idx"), - data=request.get("data", {}), - parameters=request.get("parameters"), - ) - for request in requests["requests"] - ] - ) - - def as_bytes(self) -> bytes: - """Serializes InferenceHandlerRequests object to bytes.""" - requests = { - "requests": [ - { - "idx": request.idx, - "data": request.data, - "parameters": request.parameters, - } - for request in self.requests - ] - } - return json.dumps(requests).encode("utf-8") - - -@dataclasses.dataclass -class InferenceHandlerResponses: - """Object transferred from callback handler containing output data.""" - - responses: Optional[List[MetaRequestResponse]] = None - error: Optional[str] = None - - @classmethod - def from_bytes(cls, content: bytes) -> "InferenceHandlerResponses": - """Reconstruct InferenceHandlerResponses object from bytes. 
- - Args: - content: bytes to parse - """ - responses = json.loads(content) - return cls( - responses=[ - MetaRequestResponse(idx=response.get("idx"), data=response.get("data", {}), eos=response.get("eos")) - for response in responses.get("responses", []) - ], - error=responses.get("error"), - ) - - def as_bytes(self) -> bytes: - """Serializes InferenceHandlerResponses object to bytes.""" - result = {"error": self.error} - if self.responses: - result["responses"] = [ - {"idx": response.idx, "data": response.data, "eos": response.eos} for response in self.responses - ] - return json.dumps(result).encode("utf-8") - - -@dataclasses.dataclass -class BlockDescriptor: - """Descriptor of block in shared memory.""" - - shm_name: str - offset: int - size: Optional[int] = None - - def __post_init__(self): - """Initialize other attributes.""" - self.id = f"{self.shm_name}:{self.offset}" - - @classmethod - def from_id(cls, tensor_id: str): - """Create BlockDescriptor from dict.""" - shm_name, offset = tensor_id.split(":") - return cls(shm_name, int(offset)) - - -class _SharedMemorySegment: - def __init__(self, size): - self.shared_memory = multiprocessing.shared_memory.SharedMemory(create=True, size=size) - multiprocessing.util.debug(f"Created {self.shared_memory.name} of size {self.shared_memory.size}") - self.used_blocks: List[BlockDescriptor] = [] - self.used_blocks_lock = threading.RLock() - self.free_blocks = [BlockDescriptor(self.shared_memory.name, offset=0, size=size)] - self.max_free_block_size = size - - def _update_free_blocks(self): - total_size = self.shared_memory.size - free_blocks = [] - offset = 0 - - with self.used_blocks_lock: - # find holes between used blocks - for used_block in self.used_blocks: - if used_block.offset > offset: - free_blocks.append( - BlockDescriptor(self.shared_memory.name, offset=offset, size=used_block.offset - offset) - ) - offset = used_block.offset + used_block.size - # if tail is free - if offset < total_size: - free_blocks.append(BlockDescriptor(self.shared_memory.name, offset=offset, size=total_size - offset)) - - self.free_blocks = free_blocks - self.max_free_block_size = max(block.size for block in self.free_blocks) if self.free_blocks else 0 - - def __contains__(self, block_id: str) -> bool: - with self.used_blocks_lock: - return any(block_id == block.id for block in self.used_blocks) # pytype: disable=attribute-error - - def __getitem__(self, block_id: str) -> BlockDescriptor: - with self.used_blocks_lock: - for block in self.used_blocks: - if block.id == block_id: # pytype: disable=attribute-error - return block - raise KeyError(f"Block with id {block_id} not found in segment {self.shared_memory.name}") - - def allocate(self, offset, byte_size): - block = BlockDescriptor(self.shared_memory.name, offset=offset, size=byte_size) - with self.used_blocks_lock: - self.used_blocks.append(block) - self.used_blocks.sort(key=lambda block: block.offset) - self._update_free_blocks() - return block - - def release(self, block: BlockDescriptor): - with self.used_blocks_lock: - self.used_blocks.remove(block) - self._update_free_blocks() - - -class _DataBlocksServer: - _instance = None - _cnt = 0 - _minimal_segment_size = 4096 # 4KB - - def __new__(cls): - if cls._instance is None: - cls._instance = super().__new__(cls) - return cls._instance - - def __init__(self): - # WAR: for some reason, the __init__ is called on each create of proxy object - if self._cnt == 1: - return - self._cnt += 1 - self._id = uuid.uuid4() # to verify that it is singleton across processes 
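For reference, the BlockDescriptor defined above encodes a block as "<shm_name>:<offset>" in its id and recovers the segment name and offset (but not the size) in from_id. A small round-trip sketch with a made-up segment name:

    from pytriton.proxy.communication import BlockDescriptor

    # "psm_0a1b2c" is an invented shared-memory segment name, used only for illustration.
    block = BlockDescriptor(shm_name="psm_0a1b2c", offset=4096, size=1024)
    print(block.id)  # "psm_0a1b2c:4096"

    # from_id() parses the id back; the size is not part of the id, so it comes back as None.
    restored = BlockDescriptor.from_id(block.id)
    print(restored.shm_name, restored.offset, restored.size)  # psm_0a1b2c 4096 None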
- self._segments = [] - self._segments_lock = threading.RLock() - atexit.register(self.close) - - def get_free_blocks(self, bytes_sizes: Sequence[int]) -> Sequence[str]: - tensors_ids = [] - with self._segments_lock: - for byte_size in bytes_sizes: - for segment in self._segments: - if segment.max_free_block_size >= byte_size: - for free_block in segment.free_blocks: - if free_block.size >= byte_size: - block = self._allocate_block(segment, free_block.offset, byte_size) - tensors_ids.append(block.id) # pytype: disable=attribute-error - break - else: - continue # If no suitable block was found, try the next segment - break # If a suitable block was found, don't try any more segments - else: # If no suitable block was found in any segment - new_segment_size = int( - max(self._minimal_segment_size, math.pow(2, math.ceil(math.log2(byte_size)))) - ) - block = self._allocate_block( - self._create_new_segment(new_segment_size), offset=0, byte_size=byte_size - ) - tensors_ids.append(block.id) # pytype: disable=attribute-error - return tensors_ids - - def release_block(self, block_id: str): - with self._segments_lock: - for segment in self._segments: - try: - block = segment[block_id] - segment.release(block) - return - except KeyError: - pass - raise KeyError(f"Block with id {block_id} not found in server") - - def _allocate_block(self, segment: _SharedMemorySegment, offset: int, byte_size: int) -> BlockDescriptor: - return segment.allocate(offset, byte_size) - - def _create_new_segment(self, segment_size): - segment = _SharedMemorySegment(segment_size) - self._segments.append(segment) - return segment - - def _get_debug_status(self): - return { - "server_id": str(self._id), - "host_pid": multiprocessing.current_process().pid, - "segments": [ - { - "shared_memory": segment.shared_memory.name, - "used_blocks": [str(block) for block in segment.used_blocks], - } - for segment in self._segments - ], - } - - def close(self): - multiprocessing.util.debug(f"Closing server {self._id}") - with self._segments_lock: - while self._segments: - segment = self._segments.pop() - multiprocessing.util.debug(f"Closing and delete segment {segment.shared_memory.name}") - segment.shared_memory.close() - segment.shared_memory.unlink() - - -class BlocksStoreManager(multiprocessing.managers.BaseManager): - """Remote block store for storing and retrieving numpy arrays in/from shared memory.""" - - @classmethod - def _run_server(cls, registry, address, authkey, serializer, writer, initializer=None, initargs=()): - PR_SET_PDEATHSIG = 1 # noqa - libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True) - libc.prctl(PR_SET_PDEATHSIG, signal.SIGTERM) # terminate process when parent **thread** dies - super()._run_server( - registry, address, authkey, serializer, writer, initializer, initargs - ) # pytype: disable=attribute-error - - -class _DataBlocksServerProxy(multiprocessing.managers.BaseProxy): - def release_block(self, /, *args, **kwargs): - return self._callmethod("release_block", args, kwargs) - - def get_free_blocks(self, /, *args, **kwargs): - return self._callmethod("get_free_blocks", args, kwargs) - - def _get_debug_status(self, /, *args, **kwargs): - return self._callmethod("_get_debug_status", args, kwargs) - - def close(self, /, *args, **kwargs): - return self._callmethod("close", args, kwargs) - - -BlocksStoreManager.register("blocks", _DataBlocksServer, proxytype=_DataBlocksServerProxy) - - -class _FileLock: - _locks = {} - - def __new__(cls, file_path): - if file_path not in cls._locks: - 
cls._locks[file_path] = super().__new__(cls) - return cls._locks[file_path] - - def __init__(self, file_path): - if hasattr(self, "_file_path"): - return - self._file_path = pathlib.Path(file_path) - self._file_lock = None - self._lock = threading.RLock() - atexit.register(self._clean) - - def __enter__(self): - self._file_lock = self._file_path.open("a") - fcntl.flock(self._file_lock.fileno(), fcntl.LOCK_EX) - self._lock.acquire() - - def __exit__(self, exc_type, exc_value, traceback): - fcntl.flock(self._file_lock.fileno(), fcntl.LOCK_UN) - self._lock.release() - - def _clean(self): - if self._file_lock is not None: - self._file_lock.close() - try: - self._file_path.unlink(missing_ok=True) - except OSError as e: - LOGGER.warning(f"Could not remove lock file {self._file_path}; {e}") - - -class _Popen(multiprocessing.popen_spawn_posix.Popen): - def _launch(self, process_obj): - # Modified version of multiprocessing.popen_spawn_posix.Popen._launch - import io - import os - from multiprocessing import context, resource_tracker, spawn, util - - tracker_fd = resource_tracker.getfd() - self._fds.append(tracker_fd) # pytype: disable=attribute-error - - # get prep_data + remove init_main_from* as they are not required for TensorStore process - prep_data = spawn.get_preparation_data(process_obj._name) - prep_data.pop("init_main_from_module", None) - prep_data.pop("init_main_from_path", None) - - fp = io.BytesIO() - context.set_spawning_popen(self) - try: - context.reduction.dump(prep_data, fp) # pytype: disable=module-attr - context.reduction.dump(process_obj, fp) # pytype: disable=module-attr - finally: - context.set_spawning_popen(None) - - parent_r = child_w = child_r = parent_w = None - try: - parent_r, child_w = os.pipe() - child_r, parent_w = os.pipe() - cmd = spawn.get_command_line(tracker_fd=tracker_fd, pipe_handle=child_r) - self._fds.extend([child_r, child_w]) # pytype: disable=attribute-error - self.pid = util.spawnv_passfds( - spawn.get_executable(), cmd, self._fds # pytype: disable=attribute-error,wrong-arg-types - ) - self.sentinel = parent_r - with open(parent_w, "wb", closefd=False) as f: - f.write(fp.getbuffer()) - finally: - fds_to_close = [] - for fd in (parent_r, parent_w): - if fd is not None: - fds_to_close.append(fd) - self.finalizer = util.Finalize(self, util.close_fds, fds_to_close) # pytype: disable=module-attr - - for fd in (child_r, child_w): - if fd is not None: - os.close(fd) - - -class _SpawnProcess(multiprocessing.process.BaseProcess): - _start_method = "spawn" - - @staticmethod - def _Popen(process_obj): # noqa N802 - return _Popen(process_obj) - - -class _SpawnContext(multiprocessing.context.BaseContext): - _name = "spawn" - Process = _SpawnProcess - - -class TensorStore: - """Tensor store for storing and retrieving numpy arrays in/from shared memory.""" - - _SOCKET_EXISTANCE_CHECK_INTERVAL_S = 0.1 - _instances = {} - - def __new__(cls, *args, **kwargs): - """Create TensorStore object. If object with given address already exists, return it.""" - if args: - address = args[0] - elif "address" in kwargs: - address = kwargs["address"] - else: - raise TypeError("TensorStore() missing 1 required positional argument: 'address'") - - address = address.as_posix() if isinstance(address, pathlib.Path) else address - - if address not in cls._instances: - cls._instances[address] = super().__new__(cls) - - return cls._instances[address] - - def __init__(self, address: Union[str, pathlib.Path], auth_key: Optional[bytes] = None): - """Initialize TensorStore object. 
- - Args: - address: address of data store - auth_key: authentication key required to setup connection. If not provided, current process authkey will be used - """ - if not hasattr(self, "_remote_blocks_store_manager"): - address = address.as_posix() if isinstance(address, pathlib.Path) else address - self._remote_blocks_store_manager = BlocksStoreManager(address, authkey=auth_key, ctx=_SpawnContext()) - self._remote_blocks_store = None - self._manager_start_stop_filelock = _FileLock(f"{address}.lock") - - # container for keeping map between tensor_id and numpy array weak ref - self._handled_blocks: Dict[str, weakref.ReferenceType] = {} - self._handled_blocks_lock = threading.RLock() - - self._shm_segments: Dict[str, multiprocessing.shared_memory.SharedMemory] = {} - self._shm_segments_lock = threading.RLock() - - self.serialize = serialize_numpy_with_struct_header - self.deserialize = deserialize_numpy_with_struct_header - self._calc_serialized_tensor_size = calc_serialized_size_of_numpy_with_struct_header - - @property - def address(self) -> str: - """Return address of remote block store.""" - return self._remote_blocks_store_manager.address - - def start(self): - """Start remote block store.""" - with self._manager_start_stop_filelock: - if self._remote_blocks_store is not None: - raise RuntimeError("Remote block store is already started/connected") - - self._remote_blocks_store_manager.start() - self._remote_blocks_store = self._remote_blocks_store_manager.blocks() # pytype: disable=attribute-error - - address = pathlib.Path(self._remote_blocks_store_manager.address) - self._wait_for_address(address) - LOGGER.debug( - f"Started remote block store at {address} (pid={self._remote_blocks_store_manager._process.pid})" # pytype: disable=attribute-error - ) - - def connect(self, timeout_s: Optional[float] = None): - """Connect to remote block store.""" - if self._remote_blocks_store is None: - address = pathlib.Path(self._remote_blocks_store_manager.address) - - self._wait_for_address(address, timeout_s) - self._remote_blocks_store_manager.connect() - self._remote_blocks_store = self._remote_blocks_store_manager.blocks() # pytype: disable=attribute-error - LOGGER.debug(f"Connected to remote block store at {address})") - else: - LOGGER.debug(f"Already connectd to remote block store at {self.address}") - - def _wait_for_address(self, address, timeout_s: Optional[float] = None): - should_stop_at = time.time() + timeout_s if timeout_s is not None else None - if timeout_s is not None and self._SOCKET_EXISTANCE_CHECK_INTERVAL_S > timeout_s: - socket_existance_check_interval = timeout_s - else: - socket_existance_check_interval = self._SOCKET_EXISTANCE_CHECK_INTERVAL_S - - while not address.exists(): - if should_stop_at is not None and time.time() >= should_stop_at: - raise TimeoutError(f"Timeout while waiting for {address} to be created") - time.sleep(socket_existance_check_interval) - - def _calc_serialized_size(self, tensor: np.ndarray) -> int: - # frames payload sum + total size + frames sizes - # assume 2 frames: header with tensor description + data - return sum(self._calc_serialized_tensor_size(tensor)) + struct.calcsize(" Sequence[str]: - """Append tensor to shared memory buffer. 
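A rough usage sketch of the TensorStore API around this point, assuming a scratch socket path; the real proxy exchanges the store address and auth key during its handshake, so the lifecycle here is simplified:

    import numpy as np
    from pytriton.proxy.communication import TensorStore

    store = TensorStore("/tmp/example-data-store.sock", auth_key=b"example-key")  # placeholder path and key
    store.start()  # the owning process starts the store; other processes would call connect()

    tensor_ids = store.put([np.arange(10, dtype=np.float32)])  # returns ids like "<shm name>:<offset>"
    restored = store.get(tensor_ids[0])
    print(restored)

    store.release_block(tensor_ids[0])
    store.close()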
- - Args: - tensors: numpy arrays to store - - Returns: - List of ids of stored tensors - """ - byte_size_of_frames_containers = [self._calc_serialized_size(tensor) for tensor in tensors] - tensors_ids = self._remote_blocks_store.get_free_blocks(byte_size_of_frames_containers) - blocks = [BlockDescriptor.from_id(tensor_id) for tensor_id in tensors_ids] - - for tensor, block in zip(tensors, blocks): - with self._shm_segments_lock: - shm = self._shm_segments.get(block.shm_name) - if shm is None: - shm = multiprocessing.shared_memory.SharedMemory(block.shm_name, create=False) - self._shm_segments[block.shm_name] = shm - - frames = self.serialize(tensor) - self._copy_frames(frames, shm, block.offset) - - return tensors_ids - - def get(self, tensor_id: str) -> np.ndarray: - """Get numpy array from tensor store. - - Args: - tensor_id: id of of tenosr to get - - Returns: - numpy array - """ - tensor = None - # try to handle already handled tensor from weakref - with self._handled_blocks_lock: - tensor_ref = self._handled_blocks.get(tensor_id) - if tensor_ref is not None: - tensor = tensor_ref() - - if tensor is None: # if tensor was not handled yet or weakref is already empty - block = BlockDescriptor.from_id(tensor_id) - - # check if shm segment is already opened - with self._shm_segments_lock: - shm = self._shm_segments.get(block.shm_name) - - # if not open it and put into cache - if shm is None: - shm = multiprocessing.shared_memory.SharedMemory(block.shm_name, create=False) - with self._shm_segments_lock: - shm = self._shm_segments.setdefault(block.shm_name, shm) # in meantime other thread could create it - - frames = self._handle_frames(shm, block.offset) - tensor = self.deserialize(frames) - - # store tensor in weakref to be able to release shared memory when tensor will be garbage collected - with self._handled_blocks_lock: - tensor_ref = self._handled_blocks.setdefault(tensor_id, weakref.ref(tensor)) - tensor = tensor_ref() - - return tensor # pytype: disable=bad-return-type - - def release_block(self, tensor_id: str): - """Release shared memory block. - - Args: - tensor_id: id of tensor to release - """ - LOGGER.debug(f"Releasing shared memory block for tensor {tensor_id}") - - tensor_ref = None - with self._handled_blocks_lock: - tensor_ref = self._handled_blocks.pop(tensor_id, None) - - try: - if tensor_ref is not None: - self._remote_blocks_store.release_block(tensor_id) - except OSError: # thrown when remote process is already closed - LOGGER.warning( - f"Failed to release block {tensor_id} on remote process at {self.address}. 
Probably remote process is already closed" - ) - - def _copy_frames( - self, - frames: List[Union[bytes, memoryview]], - shm: multiprocessing.shared_memory.SharedMemory, - offset: int, - ) -> int: - total_size = struct.calcsize(" List[memoryview]: - frames = [] - (total_size,) = struct.unpack_from(" None: - """Start the InferenceHandler communication.""" - self.socket = self.zmq_context.socket(zmq.REP) - model_name = self._model_config.model_name - try: - LOGGER.debug(f"Binding IPC socket at {self.shared_memory_socket}.") - self.socket.bind(self.shared_memory_socket) - self._tensor_store.connect() - - while not self.stopped: - LOGGER.debug(f"Waiting for requests from proxy model for {model_name}.") - request_payload = self.socket.recv() - requests = InferenceHandlerRequests.from_bytes(request_payload).requests - - LOGGER.debug(f"Preparing inputs for {model_name}.") - inputs = [ - Request( - data={ - input_name: self._tensor_store.get(tensor_id) - for input_name, tensor_id in request.data.items() - }, - parameters=request.parameters, - ) - for request in requests - ] - - try: - LOGGER.debug(f"Processing inference callback for {model_name}.") - responses = self._model_callable(inputs) - - responses_iterator = _ResponsesIterator(responses, decoupled=self._model_config.decoupled) - for responses in responses_iterator: - LOGGER.debug(f"Validating outputs for {self._model_config.model_name}.") - self._validator.validate_responses(inputs, responses) - LOGGER.debug(f"Copying outputs to shared memory for {model_name}.") - output_arrays_with_coords = [ - (response_idx, output_name, tensor) - for response_idx, response in enumerate(responses) - for output_name, tensor in response.items() - ] - tensor_ids = self._tensor_store.put([tensor for _, _, tensor in output_arrays_with_coords]) - responses = [{} for _ in range(len(responses))] - for (response_idx, output_name, _), tensor_id in zip(output_arrays_with_coords, tensor_ids): - responses[response_idx][output_name] = tensor_id - - responses = InferenceHandlerResponses( - responses=[ - MetaRequestResponse(idx=idx, data=response, eos=False) - for idx, response in enumerate(responses) - ], - ) - LOGGER.debug(f"Sending response: {responses}") - self.socket.send(responses.as_bytes()) - self.socket.recv() # wait for ack - - responses = InferenceHandlerResponses( - responses=[MetaRequestResponse(idx=idx, eos=True) for idx in range(len(requests))] - ) - LOGGER.debug(f"Send eos response to proxy model for {model_name}.") - self.socket.send(responses.as_bytes()) - - except PyTritonUnrecoverableError: - error = traceback.format_exc() - responses = InferenceHandlerResponses(error=error) - LOGGER.error( - "Unrecoverable error thrown during calling model callable. " - "Shutting down Triton Inference Server. " - f"{error}" - ) - self.stopped = True - self._notify_proxy_backend_observers(InferenceHandlerEvent.UNRECOVERABLE_ERROR, error) - LOGGER.debug(f"Send response to proxy model for {model_name}.") - self.socket.send(responses.as_bytes()) - except Exception: - error = traceback.format_exc() - responses = InferenceHandlerResponses(error=error) - LOGGER.error(f"Error occurred during calling model callable: {error}") - self.socket.send(responses.as_bytes()) - finally: - for tensor_id in itertools.chain(*[request.data.values() for request in requests]): - self._tensor_store.release_block(tensor_id) - - except zmq.error.ContextTerminated: - LOGGER.info("Context was terminated. 
InferenceHandler will be closed.") - except Exception as exception: - LOGGER.error("Internal proxy backend error. InferenceHandler will be closed.") - LOGGER.exception(exception) - finally: - LOGGER.info("Closing socket") - socket_close_timeout_s = 0 - self.socket.close(linger=socket_close_timeout_s) - LOGGER.info("Closing TensorStore") - self._tensor_store.close() - - LOGGER.info("Leaving proxy backend thread") - self._notify_proxy_backend_observers(InferenceHandlerEvent.FINISHED, None) - - def stop(self) -> None: - """Stop the InferenceHandler communication.""" - LOGGER.info("Closing proxy") - self.stopped = True - self.join() - - def on_proxy_backend_event(self, proxy_backend_event_handle_fn: InferenceEventsHandler): - """Register InferenceEventsHandler callable. - - Args: - proxy_backend_event_handle_fn: function to be called when proxy backend events arises - """ - self._proxy_backend_events_observers.append(proxy_backend_event_handle_fn) - - def _notify_proxy_backend_observers(self, event: InferenceHandlerEvent, context: typing.Optional[typing.Any]): - for proxy_backend_event_handle_fn in self._proxy_backend_events_observers: - proxy_backend_event_handle_fn(self, event, context) diff --git a/stf/stf-api-alternative/pytriton/pytriton/proxy/model.py b/stf/stf-api-alternative/pytriton/pytriton/proxy/model.py deleted file mode 100644 index 29380bea1d9d0caf7e7a873dbc9ba8e72ae91a2d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/proxy/model.py +++ /dev/null @@ -1,339 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Model definition for Python Backend. - -This file is automatically copied during deployment on Triton. -""" -import base64 -import itertools -import json -import logging -import multiprocessing -import traceback -import typing - -import triton_python_backend_utils as pb_utils # pytype: disable=import-error -import zmq # pytype: disable=import-error - -from . 
import communication -from .communication import InferenceHandlerRequests, InferenceHandlerResponses, MetaRequestResponse, TensorStore -from .types import Request - -LOGGER = logging.getLogger(__name__) - - -_ACK = b"" - - -def _update_loggers(): - def get_triton_backend_logger(): - try: - # https://github.com/triton-inference-server/python_backend/blob/main/src/pb_stub.cc#L1501 - logger = pb_utils.Logger # pytype: disable=module-attr - logger.error = logger.log_error - logger.warning = logger.log_warn - logger.info = logger.log_info - logger.debug = logger.log_verbose - # do not set log_to_stderr in Backend - except (ImportError, AttributeError): - logger = logging.getLogger("backend") - root_logger = logging.getLogger() - if root_logger.level <= logging.INFO: - multiprocessing.util.log_to_stderr(logging.INFO) - return logger - - logger = get_triton_backend_logger() - global LOGGER - LOGGER = logger - communication.LOGGER = logger - - -class _ResponsesIterator: - def __init__(self, socket, requests_number: int) -> None: - self._socket = socket - self._requests_eos = [False] * requests_number - - def __iter__(self): - return self - - def __next__(self): - responses_payload = self._socket.recv() - - meta_responses = InferenceHandlerResponses.from_bytes(responses_payload) - if meta_responses.error: - raise pb_utils.TritonModelException(meta_responses.error) # pytype: disable=module-attr - - for meta_response in meta_responses.responses: - self._requests_eos[meta_response.idx] |= meta_response.eos - - if all(self._requests_eos): - raise StopIteration() - else: - self._socket.send(_ACK) # send ack to receive further results - - return meta_responses - - -class _ResponsesSender: - def __init__(self, requests: typing.List): - self._requests = requests - self._responses = [] # typing.List[pb_utils.InferenceResponse] - - def send(self, responses: typing.List, eos: typing.Optional[typing.Sequence[bool]] = None): - assert len(responses) == len(self._requests) - # here all responses should be not None - self._responses.extend(responses) - - def finish(self): - to_send = self._responses - self._responses = [] - return to_send - - -class _DecoupledResponsesSender: - def __init__(self, requests: typing.List): - self._senders = [request.get_response_sender() for request in requests] - self._eos = [False] * len(requests) - - def send(self, responses: typing.List, eos: typing.Optional[typing.Sequence[typing.Optional[bool]]] = None): - assert len(responses) == len(self._senders) - eos = eos or [None] * len(self._senders) - for response_idx, (response, response_eos) in enumerate(zip(responses, eos)): - if response is None and not response_eos: - continue - - sender = self._senders[response_idx] - self._eos[response_idx] |= response_eos - flags = 0 - if response_eos: - flags |= pb_utils.TRITONSERVER_RESPONSE_COMPLETE_FINAL - if response is not None: - sender.send(response, flags=flags) - elif response_eos: - sender.send(flags=flags) - - def finish(self): - for idx, sender in enumerate(self._senders): - if not self._eos[idx]: - sender.send(flags=pb_utils.TRITONSERVER_RESPONSE_COMPLETE_FINAL) - return None - - -class TritonPythonModel: - """Triton PythonBackend model implementation for proxy.""" - - def __init__(self): - """Create TritonPythonModel object.""" - self.model_config = None - self.context = zmq.Context() - self.socket = self.context.socket(zmq.REQ) - - self.model_config = None - self.model_inputs = [] - self.model_outputs = [] - self.model_outputs_dict = {} - - self._tensor_store = None - 
self._last_response_ids = [] - - self._sender_cls = None - - def initialize(self, args): - """Triton Inference Server Python Backend API called only once when the model is being loaded. - - Allows the model to initialize any state associated with this model. - - Args: - args: Dictionary with both keys and values are strings. The dictionary keys and values are: - * model_config: A JSON string containing the model configuration - * model_instance_kind: A string containing model instance kind - * model_instance_device_id: A string containing model instance device ID - * model_repository: Model repository path - * model_version: Model version - * model_name: Model name - """ - _update_loggers() - try: - LOGGER.debug("Reading model config") - self.model_config = json.loads(args["model_config"]) - shared_memory_socket = self.model_config["parameters"]["shared-memory-socket"]["string_value"] - LOGGER.debug(f"Connecting to IPC socket at {shared_memory_socket}") - - instance_data = self._get_instance_data(shared_memory_socket) - self.socket.connect(instance_data["shared-memory-socket"]) - LOGGER.debug(f"Connected to socket {shared_memory_socket}.") - - self.model_inputs = self.model_config["input"] - self.model_outputs = self.model_config["output"] - self.model_outputs_dict = {output_def["name"]: output_def for output_def in self.model_outputs} - - LOGGER.debug(f"Model inputs: {self.model_inputs}") - LOGGER.debug(f"Model outputs: {self.model_outputs}") - - data_store_socket = instance_data["data-store-socket"] - auth_key = base64.b64decode(instance_data["auth-key"]) - - self._tensor_store = TensorStore(data_store_socket, auth_key) - self._tensor_store.connect() - self._last_response_ids = [] - - self._sender_cls = { - False: _ResponsesSender, - True: _DecoupledResponsesSender, - }[self.model_config.get("model_transaction_policy", {}).get("decoupled", False)] - - except Exception: - msg = traceback.format_exc() - raise pb_utils.TritonModelException(f"Model initialize error: {msg}") # pytype: disable=module-attr - - def execute(self, triton_requests): - """Triton Inference Server Python Backend API method. - - Args: - triton_requests: A list of pb_utils.InferenceRequest - - Returns: - A list of pb_utils.InferenceResponse. 
The length of this list is the same as `triton_requests` - """ - try: - meta_requests = self._put_requests_to_buffer(triton_requests) - LOGGER.debug(f"Sending requests {meta_requests}.") - self.socket.send(meta_requests.as_bytes()) - - responses_iterator = _ResponsesIterator(self.socket, len(triton_requests)) - responses_sender = self._sender_cls(triton_requests) - LOGGER.debug(f"using sender {responses_sender}") - for meta_responses in responses_iterator: - LOGGER.debug(f"Received response: {meta_responses}") - triton_responses = self._handle_responses_from_buffer(meta_responses, len(triton_requests)) - responses_sender.send( - triton_responses, eos=[meta_response.eos for meta_response in meta_responses.responses] - ) - - # TODO: fix leak on error - self._last_response_ids.extend( - itertools.chain( - *[ - meta_response.data.values() - for meta_response in meta_responses.responses - if meta_response.data is not None - ] - ) - ) - return responses_sender.finish() - except pb_utils.TritonModelException: # pytype: disable=module-attr - raise - except Exception: - msg = traceback.format_exc() - raise pb_utils.TritonModelException(f"Model execute error: {msg}") # pytype: disable=module-attr - - def finalize(self) -> None: - """Finalize the model cleaning the buffers.""" - LOGGER.debug("Finalizing backend instance.") - LOGGER.debug("Cleaning socket and context.") - socket_close_timeout_s = 0 - if self.socket: - self.socket.close(linger=socket_close_timeout_s) - if self.context: - self.context.term() - self.socket = None - self.context = None - - LOGGER.debug("Removing allocated shared memory.") - for tensor_id in self._last_response_ids: - self._tensor_store.release_block(tensor_id) - self._last_response_ids = [] - self._tensor_store.close() - self._tensor_store = None - - LOGGER.debug("Finalized.") - - @property - def model_supports_batching(self) -> bool: - """Return if model supports batching. - - Returns: - True if model support batching, False otherwise. 
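The property relies on the standard Triton convention that a model configuration with max_batch_size > 0 supports batching. A tiny illustration on a hand-written stand-in for the model_config JSON that Triton passes to initialize():

    import json

    model_config = json.loads('{"name": "example", "max_batch_size": 8}')  # invented config
    model_supports_batching = model_config["max_batch_size"] > 0
    print(model_supports_batching)  # True; a value of 0 would mean one request at a time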
- """ - return self.model_config["max_batch_size"] > 0 - - def _get_instance_data(self, shared_memory_socket) -> typing.Dict[str, str]: - handshake_socket = self.context.socket(zmq.REQ) - handshake_socket.connect(shared_memory_socket) - handshake_socket.send_string("get_instance_socket") - instance_data_payload = handshake_socket.recv() - handshake_socket.close() - instance_data = json.loads(instance_data_payload.decode("utf-8")) - instance_data_copy = instance_data.copy() - if "auth-key" in instance_data_copy: - instance_data_copy["auth-key"] = "***" - LOGGER.debug(f"Obtained instance data: {instance_data_copy}") - return instance_data - - def _put_requests_to_buffer(self, triton_requests) -> InferenceHandlerRequests: - while self._last_response_ids: - tensor_id = self._last_response_ids.pop() - self._tensor_store.release_block(tensor_id) - - LOGGER.debug("Collecting input data from request.") - - requests = [] - for triton_request in triton_requests: - request = {} - for model_input in self.model_inputs: - input_tensor = pb_utils.get_input_tensor_by_name(triton_request, model_input["name"]) - if input_tensor is not None: - request[model_input["name"]] = input_tensor.as_numpy() - requests.append(Request(data=request, parameters=json.loads(triton_request.parameters()))) - - input_arrays_with_coords = [ - (request_idx, input_name, tensor) - for request_idx, request in enumerate(requests) - for input_name, tensor in request.items() - ] - tensor_ids = self._tensor_store.put([tensor for *_, tensor in input_arrays_with_coords]) - requests_with_ids = [{} for _ in range(len(requests))] - for (request_idx, input_name, _), tensor_id in zip(input_arrays_with_coords, tensor_ids): - requests_with_ids[request_idx][input_name] = tensor_id - - return InferenceHandlerRequests( - requests=[ - MetaRequestResponse(idx=idx, data=request_with_ids, parameters=request.parameters) - for idx, (request, request_with_ids) in enumerate(zip(requests, requests_with_ids)) - ] - ) - - def _handle_responses_from_buffer( - self, meta_response: InferenceHandlerResponses, requests_number: int - ) -> typing.List: - def _get_array_and_wrap(output_name, tensor_id: str) -> pb_utils.Tensor: # pytype: disable=module-attr - output_array = self._tensor_store.get(tensor_id) - if output_name in self.model_outputs_dict: - dtype = pb_utils.triton_string_to_numpy(self.model_outputs_dict[output_name]["data_type"]) - output_array = output_array.astype(dtype) - return pb_utils.Tensor(output_name, output_array) # pytype: disable=module-attr - - responses = meta_response.responses - if len(responses) != requests_number: - raise pb_utils.TritonModelException( # pytype: disable=module-attr - f"Number of responses {len(responses)} does not match number of requests {requests_number}" - ) - triton_inference_responses = [None] * requests_number - for response in responses: - if response.data is None: - continue - triton_inference_responses[response.idx] = pb_utils.InferenceResponse( # pytype: disable=module-attr - [_get_array_and_wrap(output_name, tensor_id) for output_name, tensor_id in response.data.items()] - ) - return triton_inference_responses diff --git a/stf/stf-api-alternative/pytriton/pytriton/proxy/types.py b/stf/stf-api-alternative/pytriton/pytriton/proxy/types.py deleted file mode 100644 index f68449febb1848636db5c37f53412f6ac4d19897..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/proxy/types.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Common data structures and type used by proxy model and inference handler.""" - -import dataclasses -from typing import Dict, List, Optional, Union - -import numpy as np - - -@dataclasses.dataclass -class Request: - """Data class for request data including numpy array inputs.""" - - data: Dict[str, np.ndarray] - parameters: Optional[Dict[str, Union[str, int, bool]]] = None - - def __getitem__(self, input_name: str) -> np.ndarray: - """Get input data.""" - return self.data[input_name] - - def __setitem__(self, input_name: str, input_data: np.ndarray): - """Set input data.""" - self.data[input_name] = input_data - - def __delitem__(self, input_name: str): - """Delete input data from request.""" - del self.data[input_name] - - def __len__(self): - """Get number of inputs.""" - return len(self.data) - - def __iter__(self): - """Iterate over input names.""" - return iter(self.data) - - def items(self): - """Iterate over input names and data.""" - return self.data.items() - - def keys(self): - """Iterate over input names.""" - return self.data.keys() - - def values(self): - """Iterate over input data.""" - return self.data.values() - - -Requests = List[Request] - - -@dataclasses.dataclass -class Response: - """Data class for response data including numpy array outputs.""" - - data: Dict[str, np.ndarray] - - def __getitem__(self, output_name: str) -> np.ndarray: - """Get output data.""" - return self.data[output_name] - - def __setitem__(self, output_name: str, output_data: np.ndarray): - """Set output data.""" - self.data[output_name] = output_data - - def __delitem__(self, output_name: str): - """Delete output data from response.""" - del self.data[output_name] - - def __len__(self): - """Get number of outputs.""" - return len(self.data) - - def __iter__(self): - """Iterate over output names.""" - return iter(self.data) - - def items(self): - """Iterate over output names and data.""" - return self.data.items() - - def keys(self): - """Iterate over output names.""" - return self.data.keys() - - def values(self): - """Iterate over output data.""" - return self.data.values() - - -Responses = List[Response] diff --git a/stf/stf-api-alternative/pytriton/pytriton/proxy/validators.py b/stf/stf-api-alternative/pytriton/pytriton/proxy/validators.py deleted file mode 100644 index de63789cad3fffdc291194132415a0ee809d44a8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/proxy/validators.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Validators used in proxy module.""" -import logging - -import numpy as np - -from pytriton.proxy.types import Requests, Responses - -LOGGER = logging.getLogger(__name__) - - -class TritonResultsValidator: - """Validate results returned by inference callable against PyTriton and Triton requirements.""" - - def __init__(self, model_config, strict: bool): - """Validate results returned by inference callable against PyTriton and Triton requirements. - - Args: - model_config: Model configuration on Triton side - strict: Enable/disable strict validation against model config - """ - self._model_config = model_config - self._model_outputs = {output.name: output for output in model_config.outputs} - self._strict = strict - - def validate_responses(self, requests: Requests, responses: Responses): - """Validate responses returned by inference callable against PyTriton and Triton requirements. - - Args: - requests: Requests received from Triton - responses: Responses returned by inference callable - - Raises: - ValueError if responses are incorrect - """ - requests_number = len(requests) - _validate_outputs(self._model_config, self._model_outputs, responses, self._strict, requests_number) - - -def _validate_outputs(model_config, model_outputs, outputs, strict: bool, requests_number: int): - """Validate outputs of model. - - Args: - model_config: Model configuration on Triton side - model_outputs: Mapped outputs configuration - outputs: Returned outputs from inference callable - strict: Enable/disable strict validation against model config - requests_number: Number of requests - - Raises: - ValueError if outputs are incorrect - """ - if not isinstance(outputs, list): - raise ValueError( - f"Outputs returned by `{model_config.model_name}` model callable " - "must be list of response dicts with numpy arrays" - ) - if len(outputs) != requests_number: - raise ValueError( - f"Number of outputs returned by `{model_config.model_name}` inference callable " - f"({len(outputs)}) does not match number of requests ({requests_number}) received from Triton." - ) - - LOGGER.debug(f"Outputs: {outputs}") - for response in outputs: - LOGGER.debug(f"Response: {response}") - if not isinstance(response, dict): - raise ValueError( - f"Outputs returned by `{model_config.model_name}` model callable " - "must be list of response dicts with numpy arrays" - ) - for name, value in response.items(): - LOGGER.debug(f"{name}: {value}") - _validate_output_data(model_config, name, value) - if strict: - _validate_output_dtype_and_shape(model_config, model_outputs, name, value) - - -def _validate_output_data(model_config, name, value): - """Validate output with given name and value. 
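The checks above reduce to a simple contract: the inference callable must return one dict per request, keyed by string output names, with numpy array values of a supported dtype kind, and object arrays must hold bytes rather than str. A minimal conforming result, with invented output names:

    import numpy as np

    # One response dict per incoming request; keys are output names, values are numpy arrays.
    responses = [
        {"probabilities": np.array([[0.1, 0.9]], dtype=np.float32)},
        {"probabilities": np.array([[0.7, 0.3]], dtype=np.float32)},
    ]

    # Object-dtype outputs must contain bytes, not str, to pass validation.
    text_response = {"label": np.array([b"positive"], dtype=np.object_)}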
- - Args: - model_config: Model configuration on Triton side - name: Name of output - value: Value returned in output - - Raises: - ValueError if output is incorrect - """ - if not isinstance(name, str): - raise ValueError(f"Not all keys returned by `{model_config.model_name}` model callable are string") - if not isinstance(value, np.ndarray): - raise ValueError(f"Not all values returned by `{model_config.model_name}` model callable are numpy arrays") - else: - allowed_kind = "biufOSU" - if value.dtype.kind not in allowed_kind: - raise ValueError( - f"Only bool, numeric, string, unicode and object arrays " - f"are supported by Triton (dtype.kind: {allowed_kind}). " - f"Returned `{name}` for model `{model_config.model_name}` " - f"has `{value.dtype.kind}` dtype.kind." - ) - if value.dtype.kind == "O": - if isinstance(value.item(0), str): - raise ValueError( - "Use string/byte-string instead of object for passing " - f"string in NumPy array from model `{model_config.model_name}`." - ) - elif not isinstance(value.item(0), bytes): - raise ValueError( - "Only bytes as objects dtype are supported by PyTriton. " - f"Returned `{name}` from `{model_config.model_name}` " - f"has `{type(value.item(0))}` type." - ) - - -def _validate_output_dtype_and_shape(model_config, model_outputs, name, value): - """Validate output with given name and value against the model config. - - Args: - model_config: Model configuration on Triton side - model_outputs: Mapped outputs defined in model config - name: Name of output - value: Value returned in output - - Raises: - ValueError if output does not match defined values in model config - """ - output_config = model_outputs.get(name) - if not output_config: - raise ValueError( - f"Returned output `{name}` is not defined in model config for model `{model_config.model_name}`." - ) - - allowed_object_types = [bytes, object, np.bytes_, np.object_] - if (value.dtype.kind not in "OSU" and not np.issubdtype(value.dtype, output_config.dtype)) or ( - value.dtype.kind in "OSU" and output_config.dtype not in allowed_object_types - ): - raise ValueError( - f"Returned output `{name}` for model `{model_config.model_name}` has invalid type. " - f"Returned: {value.dtype} ({value.dtype.kind}). Expected: {output_config.dtype}." - ) - - batch_shape = 1 if model_config.batching else 0 - LOGGER.debug( - f"Current output `{name}` for model `{model_config.model_name}` has shape: {value.shape[batch_shape:]}" - ) - LOGGER.debug(f"Expected output `{name}` for model `{model_config.model_name}` has shape: {output_config.shape}") - if len(value.shape[batch_shape:]) != len(output_config.shape): - raise ValueError( - f"Returned output `{name}` for model `{model_config.model_name}` has invalid shapes. " - f"Returned: {value.shape[batch_shape:]}. Expected: {output_config.shape}." - ) - if any(x != y != -1 for x, y in zip(value.shape[batch_shape:], output_config.shape)): - raise ValueError( - f"Returned output `{name}` for model `{model_config.model_name}` " - "has invalid shapes at one or more positions. " - f"Returned: {value.shape[batch_shape:]}. Expected: {output_config.shape}." - ) diff --git a/stf/stf-api-alternative/pytriton/pytriton/server/__init__.py b/stf/stf-api-alternative/pytriton/pytriton/server/__init__.py deleted file mode 100644 index 935a79d63fab2a26564aa03472f3af1e62b0a954..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/server/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2020-2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/pytriton/server/python_backend_config.py b/stf/stf-api-alternative/pytriton/pytriton/server/python_backend_config.py deleted file mode 100644 index fa52c2a9575f85838c31f201379a435ed9a14bef..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/server/python_backend_config.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Python Backend configuration class. - -Use to configure the CLI argument for Python Backend passed on Triton Inference Server process start. - - Examples of use: - - config = PythonBackendConfig() - config["shm-default-byte-size"] = 33554432 - config.to_list_args() # ["python,shm-default-byte-size=33554432"] -""" -from typing import Any, Dict, List, Optional, Union - -from pytriton.exceptions import PyTritonError - - -class PythonBackendConfig: - """A config class to set arguments to the Triton Inference Server. - - An argument set to None will use the server default. - """ - - backend_arg_keys = [ - "shm-region-prefix-name", - "shm-default-byte-size", - "shm-growth-byte-size", - ] - - def __init__(self): - """Construct PythonBackendConfig.""" - self._backend_args = {} - - @classmethod - def allowed_keys(cls): - """Return the list of available server arguments with snake cased options. - - Returns: - List of str. The keys that can be used to configure Python Backend instance - """ - snake_cased_keys = [key.replace("-", "_") for key in cls.backend_arg_keys] - return cls.backend_arg_keys + snake_cased_keys - - @classmethod - def backend_keys(cls): - """Return the list of available server arguments with snake cased options. - - Returns: - List of str. The keys that can be used to configure Python Backend instance - """ - snake_cased_keys = [key.replace("-", "_") for key in cls.backend_arg_keys] - return cls.backend_arg_keys + snake_cased_keys - - def update_config(self, params: Optional[Dict] = None) -> None: - """Allows setting values from a params dict. - - Args: - params: The keys are allowed args to perf_analyzer - """ - if params: - for key in params: - self[key.strip().replace("_", "-")] = params[key] - - def to_list_args(self) -> List[str]: - """Utility function to convert a config into a list of arguments to the server with CLI. - - Returns: - The command consisting of all set arguments to the Python Backend. - e.g. 
['python,shm-default-byte-size=33554432'] - """ - cli_items = [] - for key, val in self._backend_args.items(): - if val is None: - continue - cli_items.append(f"python,{key}={val}") - - return cli_items - - def copy(self) -> "PythonBackendConfig": - """Create copy of config. - - Returns: - PythonBackendConfig object that has the same args as this one - """ - config_copy = PythonBackendConfig() - config_copy.update_config(params=self._backend_args) - return config_copy - - def backend_args(self) -> Dict: - """Return the dict with defined server arguments. - - Returns: - Dict where keys are server arguments values are their values - """ - return self._backend_args - - def __getitem__(self, key: str) -> Any: - """Gets an arguments value in config. - - Args: - key: The name of the argument to the Python Backend - - Returns: - The value that the argument is set to in this config - """ - kebab_cased_key = key.strip().replace("_", "-") - return self._backend_args.get(kebab_cased_key, None) - - def __setitem__(self, key: str, value: Union[str, int]) -> None: - """Sets an arguments value in config after checking if defined/supported. - - Args: - key: The name of the argument to the Python Backend - value: The value to which the argument is being set - - Raises: - PyTritonError: if key is unsupported or undefined in the config class - """ - assert isinstance(value, int) or isinstance(value, str) - - kebab_cased_key = key.strip().replace("_", "-") - if kebab_cased_key in self.backend_arg_keys: - self._backend_args[kebab_cased_key] = value - else: - raise PyTritonError(f"The argument {key!r} to the Python Backend is not supported by the pytriton.") - - def __contains__(self, key: str) -> bool: - """Checks if an argument is defined in the PythonBackendConfig. - - Args: - key: The name of the attribute to check for definition in PythonBackendConfig - - Returns: - True if the argument is defined in the config, False otherwise - """ - kebab_cased_key = key.strip().replace("_", "-") - value = self._backend_args.get(kebab_cased_key, None) - return value is not None diff --git a/stf/stf-api-alternative/pytriton/pytriton/server/triton_server.py b/stf/stf-api-alternative/pytriton/pytriton/server/triton_server.py deleted file mode 100644 index 202fe46949cfe74fb30cbc2cf13a1617d204358e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/server/triton_server.py +++ /dev/null @@ -1,279 +0,0 @@ -# Copyright (c) 2020-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Triton Inference Server class. - -Use to start and maintain the Triton Inference Server process. 
- - Examples of use: - - server = TritonServer( - path="/path/to/tritonserver/binary", - libs_path="/path/to/tritonserver/libraries", - config=TritonServerConfig() - ) - server.start() - -""" -import ctypes.util -import importlib -import json -import logging -import os -import pathlib -import signal -import sys -import threading -import traceback -from typing import Callable, Dict, Literal, Optional, Sequence, Union - -from pytriton.utils.logging import silence_3rd_party_loggers - -from ..utils import endpoint_utils -from .triton_server_config import TritonServerConfig - -LOGGER = logging.getLogger(__name__) -SERVER_OUTPUT_TIMEOUT_SECS = 30 -_PROXY_REQUIRED_MODULES = ["numpy", "zmq"] -_PYTRITON_STARTED_IN_PY310 = (3, 10) <= sys.version_info < (3, 11) - -silence_3rd_party_loggers() - - -def get_triton_python_backend_python_env() -> pathlib.Path: - """Get the path to the python environment for the triton python backend. - - Officially built python backend is built with python 3.8 so need to - use the same python version to run the python backend. - - Also, python environment should contain packages required by the proxy. - - Returns: - Path to the python environment with python 3.8 - """ - env_path = pathlib.Path(sys.exec_prefix) - installed_modules = [] - missing_modules = [] - for module_name in _PROXY_REQUIRED_MODULES: - try: - importlib.import_module(module_name) - installed_modules.append(module_name) - except ImportError: - missing_modules.append(module_name) - - if missing_modules: - raise RuntimeError( - "Python environment for python backend is missing required packages. " - f"Ensure that you have {', '.join(_PROXY_REQUIRED_MODULES)} installed in the {env_path} environment. " - f"Installed modules {', '.join(installed_modules)}. Missing modules {', '.join(missing_modules)}." - ) - - return env_path - - -class TritonServer: - """Implementation of TritonServer interface that runs tritonserver locally as subprocess.""" - - def __init__( - self, - *, - path: Union[str, pathlib.Path], - libs_path: Union[str, pathlib.Path], - config: TritonServerConfig, - gpus: Optional[Sequence[int]] = None, - verbose: bool = True, - ): - """Triton server constructor. - - Args: - path: The absolute path to the tritonserver executable - libs_path: The absolute path to the tritonserver libraries - config: The config object containing arguments for this server instance - gpus: sequence of GPUs device ids to attach to process - verbose: Enable verbose logging of server to STDOUT - """ - self._server_path = pathlib.Path(path) - self._server_libs_path = pathlib.Path(libs_path) - self._server_config = config - self._gpus = gpus - self._tritonserver_running_cmd = None - self._tritonserver_logs = "" - self._verbose = verbose - self._on_exit_lock = threading.RLock() - self._on_exit = [] - - assert self._server_config["model-repository"], "Triton Server requires --model-repository argument to be set." - - def start(self) -> None: - """Starts the tritonserver process. - - The method can be executed multiple times and only single process is started. - """ - if self.is_alive(): - raise RuntimeError( - f"You have to stop previously started tritonserver process first " - f"pid={self._tritonserver_running_cmd.pid}" - ) - else: - env = self._get_env() - - LOGGER.debug(f"Triton Server binary {self._server_path}. 
Environment:\n{json.dumps(env, indent=4)}") - tritonserver_cmd, *rest = self._server_path.as_posix().split(" ", 1) - - import sh - - tritonserver_cmd = sh.Command(tritonserver_cmd) - tritonserver_cmd = tritonserver_cmd.bake(*rest) - - tritonserver_args = self._server_config.to_args_list() - - def _preexec_fn(): - PR_SET_PDEATHSIG = 1 # noqa - libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True) - libc.prctl(PR_SET_PDEATHSIG, signal.SIGTERM) - - self._tritonserver_logs = "" - self._tritonserver_running_cmd = tritonserver_cmd( - *tritonserver_args, - _env=env, - _err_to_out=True, - _out=self._record_logs, - _out_bufsize=0, - _err_bufsize=0, - _bg=True, - _bg_exc=False, - _done=self._handle_exit, - _preexec_fn=_preexec_fn, - ) - - def stop(self) -> None: - """Send the SIGINT signal to running process and wait until server finished.""" - if self.is_alive(): - LOGGER.debug( - f"Stopping Triton Inference server - sending SIGINT signal and wait {SERVER_OUTPUT_TIMEOUT_SECS}s" - ) - self._tritonserver_running_cmd.process.signal(signal.SIGINT) - try: - LOGGER.debug("Waiting for process to stop.") - self._tritonserver_running_cmd.wait(timeout=SERVER_OUTPUT_TIMEOUT_SECS) - except Exception: - message = traceback.format_exc() - LOGGER.debug(f"Error message: \n{message}") - try: - if self.is_alive(): - LOGGER.debug("Timeout waiting for server. Trying to kill process.") - self._tritonserver_running_cmd.process.kill() - self._tritonserver_running_cmd.wait(timeout=SERVER_OUTPUT_TIMEOUT_SECS) - except Exception: - LOGGER.debug(f"Could not kill triton server pid={self._tritonserver_running_cmd.pid}") - message = traceback.format_exc() - LOGGER.debug(f"Error message: \n{message}") - - def register_on_exit(self, callback: Callable) -> None: - """Register callback executed on process exit. - - Args: - callback: callable to register in callbacks - """ - with self._on_exit_lock: - self._on_exit.append(callback) - - def unregister_on_exit(self, callback: Callable) -> None: - """Unregister callback executed on process exit. - - Args: - callback: callable to unregister from callbacks - """ - with self._on_exit_lock: - self._on_exit.remove(callback) - - def is_alive(self) -> bool: - """Verify if server is currently running. - - Returns: - True when server is running, False otherwise - """ - return self._tritonserver_running_cmd is not None and self._tritonserver_running_cmd.is_alive() - - def logs(self) -> str: - """Return the server logs of running server. - - Returns: - String with capture logs - """ - return self._tritonserver_logs - - def get_endpoint(self, endpoint: Literal["http", "grpc", "metrics"]) -> str: - """Get endpoint url. - - Args: - endpoint: endpoint name - - Returns: - endpoint url in form of {protocol}://{host}:{port} - """ - return endpoint_utils.get_endpoint(self._server_config, endpoint) - - def _record_logs(self, line: Union[bytes, str]) -> None: - """Record logs obtained from server process. If verbose logging enabled, print the log into STDOUT. - - Args: - line: Log line obtained from server - """ - if isinstance(line, bytes): - line = line.decode("utf-8", errors="replace") - - if self._verbose: - print(line, end="") # noqa: T201 - - self._tritonserver_logs += line - - def _get_env(self) -> Dict: - """Create and return environment variables for server execution. 
- - Returns: - Dict with environment variables - """ - env = os.environ.copy() - if self._gpus and isinstance(self._gpus, (list, tuple)): - env["CUDA_VISIBLE_DEVICES"] = ",".join([str(gpu) for gpu in self._gpus]) - - if "LD_LIBRARY_PATH" in env: - env["LD_LIBRARY_PATH"] += ":" + self._server_libs_path.as_posix() - else: - env["LD_LIBRARY_PATH"] = self._server_libs_path.as_posix() - - env_path = get_triton_python_backend_python_env() - python_bin_directory = env_path / "bin" - env["PATH"] = f"{python_bin_directory.as_posix()}:{env['PATH']}" - - return env - - def _handle_exit(self, _, success, exit_code) -> None: - """Handle exit of server process. Trigger callbacks if provided. - - Args: - success: Flag indicating if process succeeded or failed - exit_code: Exit code with which server process finished - """ - if not success: - LOGGER.warning("Triton Inference Server exited with failure. Please wait.") - LOGGER.debug(f"Triton Inference Server exit code {exit_code}") - else: - LOGGER.debug("Triton Inference Server stopped") - with self._on_exit_lock: - for callback in self._on_exit: - try: - callback(success, exit_code) - except Exception as e: - LOGGER.debug(f"Error during calling on_exit callback; {e}") diff --git a/stf/stf-api-alternative/pytriton/pytriton/server/triton_server_config.py b/stf/stf-api-alternative/pytriton/pytriton/server/triton_server_config.py deleted file mode 100644 index 4637f6376d987831e923a44f3980ad41578d5454..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/server/triton_server_config.py +++ /dev/null @@ -1,243 +0,0 @@ -# Copyright (c) 2020-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Triton Inference Server configuration class. - -Use to configure the CLI argument for starting the Triton Inference Server process. - - Examples of use: - - config = TritonServerConfig() - config["log-verbose"] = 1 - config.to_cli_string() -""" -from typing import Any, Dict, List, Optional - -from pytriton.exceptions import PyTritonError - - -class TritonServerConfig: - """A config class to set arguments to the Triton Inference Server. - - An argument set to None will use the server default. 
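A minimal sketch of how this configuration class behaves, based on the methods defined below (values are placeholders): keys may be given in kebab-case or snake_case, list values emit one flag per item, and unsupported keys raise PyTritonError:

    from pytriton.server.triton_server_config import TritonServerConfig

    config = TritonServerConfig()
    config["log-verbose"] = 1                # kebab-case key
    config["model_repository"] = "/models"   # snake_case keys are normalized to kebab-case
    config["backend-config"] = ["python,shm-region-prefix-name=demo"]  # list values repeat the flag

    print(config.to_cli_string())
    # --log-verbose=1 --model-repository=/models --backend-config=python,shm-region-prefix-name=demo
    print(config.to_args_list())
    # ['--log-verbose', '1', '--model-repository', '/models',
    #  '--backend-config', 'python,shm-region-prefix-name=demo']

Note that to_args_list() splits only on the first equals sign, so composite values such as the backend-config entry stay intact.
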
- """ - - # https://github.com/triton-inference-server/server/blob/main/src/command_line_parser.cc - server_arg_keys = [ - # Server - "id", - # Logging - "log-verbose", - "log-info", - "log-warning", - "log-error", - "log-format", - "log-file", - # Model Repository - "model-store", - "model-repository", - # Exit - "exit-timeout-secs", - "exit-on-error", - # Strictness - "disable-auto-complete-config", - "strict-model-config", - "strict-readiness", - # http options - "allow-http", - "http-address", - "http-port", - "reuse-http-port", - "http-header-forward-pattern", - "http-thread-count", - # grpc options - "allow-grpc", - "grpc-address", - "grpc-port", - "reuse-grpc-port", - "grpc-header-forward-pattern", - "grpc-infer-allocation-pool-size", - "grpc-use-ssl", - "grpc-use-ssl-mutual", - "grpc-server-cert", - "grpc-server-key", - "grpc-root-cert", - "grpc-infer-response-compression-level", - "grpc-keepalive-time", - "grpc-keepalive-timeout", - "grpc-keepalive-permit-without-calls", - "grpc-http2-max-pings-without-data", - "grpc-http2-min-recv-ping-interval-without-data", - "grpc-http2-max-ping-strikes", - "grpc-restricted-protocol", - # metrics options - "allow-metrics", - "allow-gpu-metrics", - "allow-cpu-metrics", - "metrics-interval-ms", - "metrics-port", - "metrics-address", - # Model control - "model-control-mode", - "repository-poll-secs", - "load-model", - # Memory and GPU - "pinned-memory-pool-byte-size", - "cuda-memory-pool-byte-size", - "min-supported-compute-capability", - "buffer-manager-thread-count", - # Backend config - "backend-directory", - "backend-config", - "allow-soft-placement", - "gpu-memory-fraction", - "tensorflow-version", - # SageMaker integration - "allow-sagemaker", - "sagemaker-port", - "sagemaker-safe-port-range", - "sagemaker-thread-count", - # VertexAI integration - "allow-vertex-ai", - "vertex-ai-port", - "vertex-ai-thread-count", - "vertex-ai-default-model", - "metrics-config", - "trace-config", - "cache-config", - "cache-directory", - ] - - def __init__(self): - """Construct TritonServerConfig.""" - self._server_args = {} - - @classmethod - def allowed_keys(cls): - """Return the list of available server arguments with snake cased options. - - Returns: - List of str. The keys that can be used to configure tritonserver instance - """ - snake_cased_keys = [key.replace("-", "_") for key in cls.server_arg_keys] - return cls.server_arg_keys + snake_cased_keys - - def update_config(self, params: Optional[Dict] = None) -> None: - """Allows setting values from a params dict. - - Args: - params: The keys are allowed args to perf_analyzer - """ - if params: - for key in params: - self[key.strip().replace("_", "-")] = params[key] - - def to_cli_string(self) -> str: - """Utility function to convert a config into a string of arguments to the server with CLI. - - Returns: - The command consisting of all set arguments to the tritonserver. - e.g. '--model-repository=/models --log-verbose=True' - """ - cli_items = [] - for key, val in self._server_args.items(): - if val is None: - continue - if isinstance(val, (tuple, list)): - for sub_val in val: - cli_items.append(f"--{key}={sub_val}") - else: - cli_items.append(f"--{key}={val}") - return " ".join(cli_items) - - def to_args_list(self) -> List: - """Utility function to convert a cli string into a list of arguments. - - The function is taking into account "smart" delimiters. Notice in the example below that only the first equals - sign is used as split delimiter. 
- - Returns: - The list of arguments consisting of all set arguments to the tritonserver. - - Example: - input cli_string: "--model-control-mode=explicit - --backend-config=tensorflow,version=2" - output: ['--model-control-mode', 'explicit', - '--backend-config', 'tensorflow,version=2'] - """ - args_list = [] - args = self.to_cli_string().split() - for arg in args: - args_list += arg.split("=", 1) - return args_list - - def copy(self) -> "TritonServerConfig": - """Create copy of config. - - Returns: - TritonServerConfig object that has the same args as this one - """ - config_copy = TritonServerConfig() - config_copy.update_config(params=self._server_args) - return config_copy - - def server_args(self) -> Dict: - """Return the dict with defined server arguments. - - Returns: - Dict where keys are server arguments values are their values - """ - return self._server_args - - def __getitem__(self, key: str) -> Any: - """Gets an arguments value in config. - - Args: - key: The name of the argument to the tritonserver - - Returns: - The value that the argument is set to in this config - """ - kebab_cased_key = key.strip().replace("_", "-") - return self._server_args.get(kebab_cased_key, None) - - def __setitem__(self, key: str, value: Any) -> None: - """Sets an arguments value in config after checking if defined/supported. - - Args: - key: The name of the argument to the tritonserver - value: The value to which the argument is being set - - Raises: - PyTritonError: if key is unsupported or undefined in the config class - """ - kebab_cased_key = key.strip().replace("_", "-") - if kebab_cased_key in self.server_arg_keys: - self._server_args[kebab_cased_key] = value - else: - raise PyTritonError( - f"The argument {key!r} to the Triton Inference " "Server is not supported by the pytriton." - ) - - def __contains__(self, key: str) -> bool: - """Checks if an argument is defined in the TritonServerConfig. - - Args: - key: The name of the attribute to check for definition in TritonServerConfig - - Returns: - True if the argument is defined in the config, False otherwise - """ - kebab_cased_key = key.strip().replace("_", "-") - value = self._server_args.get(kebab_cased_key, None) - return value is not None diff --git a/stf/stf-api-alternative/pytriton/pytriton/triton.py b/stf/stf-api-alternative/pytriton/pytriton/triton.py deleted file mode 100644 index ac9c687b0dbefede8ebdaabc93d8a0f23a47db30..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/triton.py +++ /dev/null @@ -1,779 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Triton Inference Server class. - -The class provide functionality to run Triton Inference Server, load the Python models and serve the requests/response -for models inference. 
- - Examples of use: - with Triton() as triton: - triton.bind( - model_name="BERT", - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.bytes_, shape=(1,)), - ], - outputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - ], - config=PythonModelConfig(max_batch_size=16), - ) - triton.serve() -""" -import atexit -import codecs -import contextlib -import dataclasses -import logging -import os -import pathlib -import re -import shutil -import sys -import threading -import threading as th -import typing -from typing import Any, Callable, Dict, List, Optional, Sequence, Union - -import typing_inspect - -from pytriton.client import ModelClient -from pytriton.client.utils import TritonUrl, create_client_from_url, wait_for_server_ready -from pytriton.constants import DEFAULT_TRITON_STARTUP_TIMEOUT_S -from pytriton.decorators import TritonContext -from pytriton.exceptions import PyTritonValidationError -from pytriton.model_config.tensor import Tensor -from pytriton.models.manager import ModelManager -from pytriton.models.model import Model, ModelConfig, ModelEvent -from pytriton.proxy.communication import TensorStore -from pytriton.server.python_backend_config import PythonBackendConfig -from pytriton.server.triton_server import TritonServer -from pytriton.server.triton_server_config import TritonServerConfig -from pytriton.utils import endpoint_utils -from pytriton.utils.dataclasses import kwonly_dataclass -from pytriton.utils.distribution import get_libs_path, get_root_module_path, get_stub_path -from pytriton.utils.workspace import Workspace - -LOGGER = logging.getLogger(__name__) - -TRITONSERVER_DIST_DIR = get_root_module_path() / "tritonserver" -MONITORING_PERIOD_S = 10.0 -WAIT_FORM_MODEL_TIMEOUT_S = 60.0 -INITIAL_BACKEND_SHM_SIZE = 4194304 # 4MB, Python Backend default is 64MB, but is automatically increased -GROWTH_BACKEND_SHM_SIZE = 1048576 # 1MB, Python Backend default is 64MB - -MODEL_URL = "/v2/models/{model_name}" -MODEL_READY_URL = f"{MODEL_URL}/ready/" -MODEL_CONFIG_URL = f"{MODEL_URL}/config/" -MODEL_INFER_URL = f"{MODEL_URL}/infer/" - - -# see https://github.com/triton-inference-server/server/blob/main/src/command_line_parser.cc for more details -@kwonly_dataclass -@dataclasses.dataclass -class TritonConfig: - """Triton Inference Server configuration class for customization of server execution. - - The arguments are optional. If value is not provided the defaults for Triton Inference Server are used. - Please, refer to https://github.com/triton-inference-server/server/ for more details. - - Args: - id: Identifier for this server. - log_verbose: Set verbose logging level. Zero (0) disables verbose logging and - values >= 1 enable verbose logging. - log_file: Set the name of the log output file. - exit_timeout_secs: Timeout (in seconds) when exiting to wait for in-flight inferences to finish. - exit_on_error: Exit the inference server if an error occurs during initialization. - strict_readiness: If true /v2/health/ready endpoint indicates ready if the server is - responsive and all models are available. - allow_http: Allow the server to listen for HTTP requests. - http_address: The address for the http server to bind to. Default is 0.0.0.0. - http_port: The port for the server to listen on for HTTP requests. Default is 8000. - http_header_forward_pattern: The regular expression pattern - that will be used for forwarding HTTP headers as inference request parameters. - http_thread_count: Number of threads handling HTTP requests. - allow_grpc: Allow the server to listen for GRPC requests. 
- grpc_address: The address for the grpc server to binds to. Default is 0.0.0.0. - grpc_port: The port for the server to listen on for GRPC requests. Default is 8001. - grpc_header_forward_pattern: The regular expression pattern that will be used - for forwarding GRPC headers as inference request parameters. - grpc_infer_allocation_pool_size: The maximum number of inference request/response objects - that remain allocated for reuse. As long as the number of in-flight requests doesn't exceed - this value there will be no allocation/deallocation of request/response objects. - grpc_use_ssl: Use SSL authentication for GRPC requests. Default is false. - grpc_use_ssl_mutual: Use mututal SSL authentication for GRPC requests. - This option will preempt grpc_use_ssl if it is also specified. Default is false. - grpc_server_cert: File holding PEM-encoded server certificate. Ignored unless grpc_use_ssl is true. - grpc_server_key: Path to file holding PEM-encoded server key. Ignored unless grpc_use_ssl is true. - grpc_root_cert: Path to file holding PEM-encoded root certificate. Ignored unless grpc_use_ssl is true. - grpc_infer_response_compression_level: The compression level to be used while returning the inference - response to the peer. Allowed values are none, low, medium and high. Default is none. - grpc_keepalive_time: The period (in milliseconds) after which a keepalive ping is sent on the transport. - grpc_keepalive_timeout: The period (in milliseconds) the sender of the keepalive ping waits - for an acknowledgement. - grpc_keepalive_permit_without_calls: Allows keepalive pings to be sent even if there are no calls in flight - grpc_http2_max_pings_without_data: The maximum number of pings that can be sent when there is no - data/header frame to be sent. - grpc_http2_min_recv_ping_interval_without_data: If there are no data/header frames being sent on the - transport, this channel argument on the server side controls the minimum time (in milliseconds) that - gRPC Core would expect between receiving successive pings. - grpc_http2_max_ping_strikes: Maximum number of bad pings that the server will tolerate before sending - an HTTP2 GOAWAY frame and closing the transport. - grpc_restricted_protocol: Specify restricted GRPC protocol setting. - The format of this flag is ,=. - Where is a comma-separated list of protocols to be restricted. - will be additional header key to be checked when a GRPC request - is received, and is the value expected to be matched. - allow_metrics: Allow the server to provide prometheus metrics. - allow_gpu_metrics: Allow the server to provide GPU metrics. - allow_cpu_metrics: Allow the server to provide CPU metrics. - metrics_interval_ms: Metrics will be collected once every milliseconds. - metrics_port: The port reporting prometheus metrics. - metrics_address: The address for the metrics server to bind to. Default is the same as http_address. - allow_sagemaker: Allow the server to listen for Sagemaker requests. - sagemaker_port: The port for the server to listen on for Sagemaker requests. - sagemaker_safe_port_range: Set the allowed port range for endpoints other than the SageMaker endpoints. - sagemaker_thread_count: Number of threads handling Sagemaker requests. - allow_vertex_ai: Allow the server to listen for Vertex AI requests. - vertex_ai_port: The port for the server to listen on for Vertex AI requests. - vertex_ai_thread_count: Number of threads handling Vertex AI requests. 
- vertex_ai_default_model: The name of the model to use for single-model inference requests. - metrics_config: Specify a metrics-specific configuration setting. - The format of this flag is =. It can be specified multiple times - trace_config: Specify global or trace mode specific configuration setting. - The format of this flag is ,=. - Where is either 'triton' or 'opentelemetry'. The default is 'triton'. - To specify global trace settings (level, rate, count, or mode), the format would be =. - For 'triton' mode, the server will use Triton's Trace APIs. - For 'opentelemetry' mode, the server will use OpenTelemetry's APIs to generate, - collect and export traces for individual inference requests. - cache_config: Specify a cache-specific configuration setting. - The format of this flag is ,=. - Where is the name of the cache, such as 'local' or 'redis'. - Example: local,size=1048576 will configure a 'local' cache implementation - with a fixed buffer pool of size 1048576 bytes. - cache_directory: The global directory searched for cache shared libraries. Default is '/opt/tritonserver/caches'. - This directory is expected to contain a cache implementation as a shared library with the name 'libtritoncache.so'. - buffer_manager_thread_count: The number of threads used to accelerate copies and other operations - required to manage input and output tensor contents. - """ - - model_repository: Optional[pathlib.Path] = None - id: Optional[str] = None - log_verbose: Optional[int] = None - log_file: Optional[pathlib.Path] = None - exit_timeout_secs: Optional[int] = None - exit_on_error: Optional[bool] = None - strict_readiness: Optional[bool] = None - allow_http: Optional[bool] = None - http_address: Optional[str] = None - http_port: Optional[int] = None - http_header_forward_pattern: Optional[str] = None - http_thread_count: Optional[int] = None - allow_grpc: Optional[bool] = None - grpc_address: Optional[str] = None - grpc_port: Optional[int] = None - grpc_header_forward_pattern: Optional[str] = None - grpc_infer_allocation_pool_size: Optional[int] = None - grpc_use_ssl: Optional[bool] = None - grpc_use_ssl_mutual: Optional[bool] = None - grpc_server_cert: Optional[pathlib.Path] = None - grpc_server_key: Optional[pathlib.Path] = None - grpc_root_cert: Optional[pathlib.Path] = None - grpc_infer_response_compression_level: Optional[str] = None - grpc_keepalive_time: Optional[int] = None - grpc_keepalive_timeout: Optional[int] = None - grpc_keepalive_permit_without_calls: Optional[bool] = None - grpc_http2_max_pings_without_data: Optional[int] = None - grpc_http2_min_recv_ping_interval_without_data: Optional[int] = None - grpc_http2_max_ping_strikes: Optional[int] = None - allow_metrics: Optional[bool] = None - allow_gpu_metrics: Optional[bool] = None - allow_cpu_metrics: Optional[bool] = None - metrics_interval_ms: Optional[int] = None - metrics_port: Optional[int] = None - metrics_address: Optional[str] = None - allow_sagemaker: Optional[bool] = None - sagemaker_port: Optional[int] = None - sagemaker_safe_port_range: Optional[str] = None - sagemaker_thread_count: Optional[int] = None - allow_vertex_ai: Optional[bool] = None - vertex_ai_port: Optional[int] = None - vertex_ai_thread_count: Optional[int] = None - vertex_ai_default_model: Optional[str] = None - metrics_config: Optional[List[str]] = None - trace_config: Optional[List[str]] = None - cache_config: Optional[List[str]] = None - cache_directory: Optional[str] = None - buffer_manager_thread_count: Optional[int] = None - - def 
__post_init__(self): - """Validate configuration for early error handling.""" - if self.allow_http not in [True, None] and self.allow_grpc not in [True, None]: - raise PyTritonValidationError("The `http` or `grpc` endpoint has to be allowed.") - - def to_dict(self): - """Map config object to dictionary.""" - return dataclasses.asdict(self) - - @classmethod - def from_dict(cls, config: Dict[str, Any]) -> "TritonConfig": - """Creates a ``TritonConfig`` instance from an input dictionary. Values are converted into correct types. - - Args: - config: a dictionary with all required fields - - Returns: - a ``TritonConfig`` instance - """ - fields: Dict[str, dataclasses.Field] = {field.name: field for field in dataclasses.fields(cls)} - unknown_config_parameters = {name: value for name, value in config.items() if name not in fields} - for name, value in unknown_config_parameters.items(): - LOGGER.warning( - f"Ignoring {name}={value} as could not find matching config field. " - f"Available fields: {', '.join(map(str, fields))}" - ) - - def _cast_value(_field, _value): - field_type = _field.type - is_optional = typing_inspect.is_optional_type(field_type) - if is_optional: - field_type = field_type.__args__[0] - return field_type(_value) - - config_with_casted_values = { - name: _cast_value(fields[name], value) for name, value in config.items() if name in fields - } - return cls(**config_with_casted_values) - - @classmethod - def from_env(cls) -> "TritonConfig": - """Creates TritonConfig from environment variables. - - Environment variables should start with `PYTRITON_TRITON_CONFIG_` prefix. For example: - - PYTRITON_TRITON_CONFIG_GRPC_PORT=45436 - PYTRITON_TRITON_CONFIG_LOG_VERBOSE=4 - - Typical use: - - triton_config = TritonConfig.from_env() - - Returns: - TritonConfig class instantiated from environment variables. - """ - prefix = "PYTRITON_TRITON_CONFIG_" - config = {name[len(prefix) :].lower(): value for name, value in os.environ.items() if name.startswith(prefix)} - return cls.from_dict(config) - - -class _LogLevelChecker: - """Check if log level is too verbose.""" - - def __init__(self, url: str) -> None: - """Initialize LogLevelChecker. - - Args: - url: Triton Inference Server URL in form of ://: - - Raises: - PyTritonClientInvalidUrlError: if url is invalid - """ - self._log_settings = None - self._url = url - - def check(self, skip_update: bool = False): - """Check if log level is too verbose. - - Also obtains wait for server is ready + log settings from server if not already obtained. - - Raises: - PyTritonClientTimeoutError: if timeout is reached - """ - if self._log_settings is None and not skip_update: - with contextlib.closing(create_client_from_url(self._url)) as client: - wait_for_server_ready(client, timeout_s=DEFAULT_TRITON_STARTUP_TIMEOUT_S) - self._log_settings = client.get_log_settings() - - if self._log_settings is not None: - log_settings = self._log_settings - if hasattr(log_settings, "settings"): # grpc client - log_settings = log_settings.settings - log_settings = {key: value.string_param for key, value in log_settings.items()} - else: # http client - log_settings = {key: str(value) for key, value in log_settings.items()} - log_verbose_level = int(log_settings.get("log_verbose_level", 0)) if log_settings is not None else 0 - if log_verbose_level > 0: - LOGGER.warning( - f"Triton Inference Server is running with enabled verbose logs (log_verbose_level={log_verbose_level}). " - "It may affect inference performance." 
- ) - - -class TritonBase: - """Base class for Triton Inference Server.""" - - def __init__(self, url: str, workspace: Union[Workspace, str, pathlib.Path, None] = None): - """Initialize TritonBase. - - Args: - url: Triton Inference Server URL in form of ://: - workspace: Workspace for storing communication sockets and the other temporary files. - """ - self._workspace = workspace if isinstance(workspace, Workspace) else Workspace(workspace) - self._url = url - self._model_manager = ModelManager(self._url) - self._cv = th.Condition() - self._triton_context = TritonContext() - self._log_level_checker = _LogLevelChecker(self._url) - - with self._cv: - self._stopped = True - self._connected = False - - atexit.register(self.stop) - - self._tensor_store = None - - def bind( - self, - model_name: str, - infer_func: Union[Callable, Sequence[Callable]], - inputs: Sequence[Tensor], - outputs: Sequence[Tensor], - model_version: int = 1, - config: Optional[ModelConfig] = None, - strict: bool = False, - ) -> None: - """Create a model with given name and inference callable binding into Triton Inference Server. - - More information about model configuration: - https://github.com/triton-inference-server/server/blob/main/docs/user_guide/model_configuration.md - - Args: - infer_func: Inference callable to handle request/response from Triton Inference Server - (or list of inference callable for multi instance model) - inputs: Definition of model inputs - outputs: Definition of model outputs - model_name: Name under which model is available in Triton Inference Server. It can only contain - alphanumeric characters, dots, underscores and dashes. - model_version: Version of model - config: Model configuration for Triton Inference Server deployment - strict: Enable strict validation between model config outputs and inference function result - """ - self._validate_model_name(model_name) - model = Model( - model_name=model_name, - model_version=model_version, - inference_fn=infer_func, - inputs=inputs, - outputs=outputs, - config=config if config else ModelConfig(), - workspace=self._workspace, - triton_context=self._triton_context, - strict=strict, - ) - model.on_model_event(self._on_model_event) - - self._model_manager.add_model(model, self.is_connected()) - - def connect(self) -> None: - """Connect to Triton Inference Server. - - Raises: - TimeoutError: if Triton Inference Server is not ready after timeout - """ - with self._cv: - if self._connected: - LOGGER.debug("Triton Inference already connected.") - return - - self._wait_for_server() - if self._tensor_store is None: - self._tensor_store = TensorStore(self._workspace.path / "data_store.sock") - self._tensor_store.start() - - self._model_manager.load_models() - self._wait_for_models() - self._connected = True - - def serve(self, monitoring_period_s: float = MONITORING_PERIOD_S) -> None: - """Run Triton Inference Server and lock thread for serving requests/response. - - Args: - monitoring_period_s: the timeout of monitoring if Triton and models are available. - Every monitoring_period_s seconds main thread wakes up and check if triton server and proxy backend - are still alive and sleep again. If triton or proxy is not alive - method returns. 
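To make the environment-variable path described in TritonConfig.from_env() above concrete, here is a small sketch; the variable names follow the documented PYTRITON_TRITON_CONFIG_ prefix and the values are arbitrary:

    import os
    from pytriton.triton import TritonConfig

    os.environ["PYTRITON_TRITON_CONFIG_GRPC_PORT"] = "45436"
    os.environ["PYTRITON_TRITON_CONFIG_LOG_VERBOSE"] = "4"

    config = TritonConfig.from_env()   # reads only PYTRITON_TRITON_CONFIG_* variables
    assert config.grpc_port == 45436   # strings are cast to the annotated field types
    assert config.log_verbose == 4
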
- """ - self.connect() - with self._cv: - try: - while self.is_alive(): - self._cv.wait(timeout=monitoring_period_s) - except KeyboardInterrupt: - LOGGER.info("SIGINT received, exiting.") - self.stop() - - def stop(self) -> bool: - """Stop Triton Inference Server and clean workspace.""" - with self._cv: - if self._stopped: - LOGGER.debug("Triton Inference already stopped.") - return False - self._stopped = True - self._connected = False - atexit.unregister(self.stop) - self._pre_stop_impl() - LOGGER.debug("Cleaning model manager, tensor store and workspace.") - self._model_manager.clean() - if self._tensor_store is not None: - self._tensor_store.close() - self._tensor_store = None - self._workspace.clean() - - with self._cv: - self._cv.notify_all() - LOGGER.debug("Stopped Triton Inference server and proxy backends") - self._log_level_checker.check(skip_update=True) - - return True - - def is_alive(self) -> bool: - """Check if Triton Inference Server is alive.""" - if not self._is_alive_impl(): - return False - - for model in self._model_manager.models: - if not model.is_alive(): - return False - return True - - def is_connected(self) -> bool: - """Check if Triton Inference Server is connected.""" - with self._cv: - return self._connected - - def __enter__(self): - """Connects to Triton server on __enter__. - - Returns: - A Triton object - """ - self.connect() - return self - - def __exit__(self, *_) -> None: - """Exit the context stopping the process and cleaning the workspace. - - Args: - *_: unused arguments - """ - self.stop() - - def _is_alive_impl(self) -> bool: - return True - - def _pre_stop_impl(self): - pass - - def _post_stop_impl(self): - pass - - def _wait_for_server(self) -> None: - """Wait for Triton Inference Server to be ready.""" - self._log_level_checker.check() - try: - with contextlib.closing(create_client_from_url(self._url)) as client: - wait_for_server_ready(client, timeout_s=DEFAULT_TRITON_STARTUP_TIMEOUT_S) - except TimeoutError as e: - LOGGER.warning( - f"Could not verify locally if Triton Inference Server is ready using {self._url}. " - "Please, check the server logs for details." - ) - raise TimeoutError("Triton Inference Server is not ready after timeout.") from e - - def _wait_for_models(self) -> None: - """Log loaded models in console to show the available endpoints.""" - self._log_level_checker.check() - - try: - for model in self._model_manager.models: - with ModelClient( - url=self._url, model_name=model.model_name, model_version=str(model.model_version) - ) as client: - # This waits for only tritonserver and lightweight proxy backend to be ready - # timeout should be short as model is loaded before execution of Triton.start() method - client.wait_for_model(timeout_s=WAIT_FORM_MODEL_TIMEOUT_S) - except TimeoutError: - LOGGER.warning( - f"Could not verify locally if models are ready using {self._url}. " - "Please, check the server logs for details." 
- ) - - for model in self._model_manager.models: - LOGGER.info(f"Infer function available as model: `{MODEL_URL.format(model_name=model.model_name)}`") - LOGGER.info(f" Status: `GET {MODEL_READY_URL.format(model_name=model.model_name)}`") - LOGGER.info(f" Model config: `GET {MODEL_CONFIG_URL.format(model_name=model.model_name)}`") - LOGGER.info(f" Inference: `POST {MODEL_INFER_URL.format(model_name=model.model_name)}`") - - LOGGER.info( - """Read more about configuring and serving models in """ - """documentation: https://triton-inference-server.github.io/pytriton.""" - ) - LOGGER.info(f"(Press CTRL+C or use the command `kill -SIGINT {os.getpid()}` to send a SIGINT signal and quit)") - - def _on_model_event(self, model: Model, event: ModelEvent, context: typing.Optional[typing.Any] = None): - LOGGER.info(f"Received {event} from {model}; context={context}") - - if event in [ModelEvent.RUNTIME_TERMINATING, ModelEvent.RUNTIME_TERMINATED]: - threading.Thread(target=self.stop).start() - - @classmethod - def _validate_model_name(cls, model_name: str) -> None: - """Validate model name. - - Args: - model_name: Model name - """ - if not model_name: - raise PyTritonValidationError("Model name cannot be empty") - - if not re.match(r"^[a-zA-Z0-9._-]+$", model_name): - raise PyTritonValidationError( - "Model name can only contain alphanumeric characters, dots, underscores and dashes" - ) - - -class Triton(TritonBase): - """Triton Inference Server for Python models.""" - - def __init__( - self, *, config: Optional[TritonConfig] = None, workspace: Union[Workspace, str, pathlib.Path, None] = None - ): - """Initialize Triton Inference Server context for starting server and loading models. - - Args: - config: TritonConfig object with optional customizations for Triton Inference Server. - Configuration can be passed also through environment variables. - See [TritonConfig.from_env()][pytriton.triton.TritonConfig.from_env] class method for details. - - Order of precedence: - - - config defined through `config` parameter of init method. - - config defined in environment variables - - default TritonConfig values - workspace: workspace or path where the Triton Model Store and files used by pytriton will be created. - If workspace is `None` random workspace will be created. - Workspace will be deleted in [Triton.stop()][pytriton.triton.Triton.stop]. - """ - - def _without_none_values(_d): - return {name: value for name, value in _d.items() if value is not None} - - default_config_dict = _without_none_values(TritonConfig().to_dict()) - env_config_dict = _without_none_values(TritonConfig.from_env().to_dict()) - explicit_config_dict = _without_none_values(config.to_dict() if config else {}) - config_dict = {**default_config_dict, **env_config_dict, **explicit_config_dict} - self._config = TritonConfig(**config_dict) - workspace_instance = workspace if isinstance(workspace, Workspace) else Workspace(workspace) - self._prepare_triton_config(workspace_instance) - endpoint_protocol = "http" if self._config.allow_http in [True, None] else "grpc" - super().__init__( - url=endpoint_utils.get_endpoint(self._triton_server_config, endpoint_protocol), - workspace=workspace_instance, - ) - self._triton_server = None - - def __enter__(self) -> "Triton": - """Entering the context launches the triton server. 
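The precedence order listed in the constructor docstring above (explicit config, then environment variables, then defaults) can be illustrated with a short sketch; the port numbers are placeholders:

    import os
    from pytriton.triton import Triton, TritonConfig

    os.environ["PYTRITON_TRITON_CONFIG_HTTP_PORT"] = "9000"

    # The explicit value wins over the environment variable; unset fields keep Triton defaults.
    triton = Triton(config=TritonConfig(http_port=8080))
    # triton.run() / triton.serve() would start tritonserver listening for HTTP on port 8080.
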
- - Returns: - A Triton object - """ - self._run_server() - super().__enter__() - return self - - def run(self) -> None: - """Run Triton Inference Server.""" - self._run_server() - self.connect() - - def serve(self, monitoring_period_s: float = MONITORING_PERIOD_S) -> None: - """Run Triton Inference Server and lock thread for serving requests/response. - - Args: - monitoring_period_s: the timeout of monitoring if Triton and models are available. - Every monitoring_period_s seconds main thread wakes up and check if triton server and proxy backend - are still alive and sleep again. If triton or proxy is not alive - method returns. - """ - self._run_server() - super().serve(monitoring_period_s=monitoring_period_s) - - def _initialize_server(self) -> None: - """Initialize Triton Inference Server before binary execution.""" - self._triton_inference_server_path = self._prepare_triton_inference_server() - self._triton_server = TritonServer( - path=(self._triton_inference_server_path / "bin" / "tritonserver").as_posix(), - libs_path=get_libs_path(), - config=self._triton_server_config, - ) - - url = ( - self._triton_server.get_endpoint("http") - if (self._config.allow_http is None or self._config.allow_http) - else self._triton_server.get_endpoint("grpc") - ) - self._log_level_checker = _LogLevelChecker(url) - - def _prepare_triton_config(self, workspace: Workspace) -> None: - self._triton_server_config = TritonServerConfig() - config_data = self._config.to_dict() - self._python_backend_config = PythonBackendConfig() - python_backend_config_data = { - "shm-region-prefix-name": self._shm_prefix(), - "shm-default-byte-size": INITIAL_BACKEND_SHM_SIZE, - "shm-growth-byte-size": GROWTH_BACKEND_SHM_SIZE, - } - for name, value in python_backend_config_data.items(): - if name not in PythonBackendConfig.allowed_keys() or value is None: - continue - - if isinstance(value, pathlib.Path): - value = value.as_posix() - self._python_backend_config[name] = value - for name, value in config_data.items(): - if name not in TritonServerConfig.allowed_keys() or value is None: - continue - - if isinstance(value, pathlib.Path): - value = value.as_posix() - self._triton_server_config[name] = value - - self._triton_server_config["model_control_mode"] = "explicit" - self._triton_server_config["backend_config"] = self._python_backend_config.to_list_args() - if "model_repository" not in self._triton_server_config: - self._triton_server_config["model_repository"] = workspace.path.as_posix() - - def _prepare_triton_inference_server(self) -> pathlib.Path: - """Prepare binaries and libraries of Triton Inference Server for execution. 
- - Return: - Path where Triton binaries are ready for execution - """ - triton_inference_server_path = self._workspace.path / "tritonserver" - - LOGGER.debug("Preparing Triton Inference Server binaries and libs for execution.") - shutil.copytree( - TRITONSERVER_DIST_DIR, - triton_inference_server_path, - ignore=shutil.ignore_patterns("python_backend_stubs", "triton_python_backend_stub"), - ) - LOGGER.debug(f"Triton Inference Server binaries copied to {triton_inference_server_path} without stubs.") - - major = sys.version_info[0] - minor = sys.version_info[1] - version = f"{major}.{minor}" - - src_stub_path = get_stub_path(version) - dst_stub_path = triton_inference_server_path / "backends" / "python" / "triton_python_backend_stub" - - LOGGER.debug(f"Copying stub for version {version} from {src_stub_path} to {dst_stub_path}") - shutil.copy(src_stub_path, dst_stub_path) - - LOGGER.debug(f"Triton Inference Server binaries ready in {triton_inference_server_path}") - - self._triton_server_config["backend_directory"] = (triton_inference_server_path / "backends").as_posix() - if "cache_directory" not in self._triton_server_config: - self._triton_server_config["cache_directory"] = (triton_inference_server_path / "caches").as_posix() - return triton_inference_server_path - - def _shm_prefix(self) -> str: - """Generate unique prefix for shm memory. - - Returns: - String with prefix - """ - hash = codecs.encode(os.urandom(4), "hex").decode() - pid = os.getpid() - return f"pytrtion{pid}-{hash}" - - def _run_server(self): - """Run Triton Inference Server.""" - if self._triton_server is None: - self._initialize_server() - if not self._triton_server.is_alive(): - with self._cv: - self._stopped = False - LOGGER.debug("Starting Triton Inference") - self._triton_server.register_on_exit(self._on_tritonserver_exit) - self._triton_server.start() - - def _is_alive_impl(self) -> bool: - """Verify is deployed models and server are alive. - - Returns: - True if server and loaded models are alive, False otherwise. - """ - if not self._triton_server: - return False - - return self._triton_server.is_alive() - - def _pre_stop_impl(self): - self._triton_server.unregister_on_exit(self._on_tritonserver_exit) - if self._triton_server is not None: - self._triton_server.stop() - - def _on_tritonserver_exit(self, *_) -> None: - """Handle the Triton Inference Server process exit. - - Args: - _: unused arguments - """ - LOGGER.debug("Got callback that tritonserver process finished") - self.stop() - - -class RemoteTriton(TritonBase): - """RemoteTriton connects to Triton Inference Server running on remote host.""" - - def __init__(self, url: str, workspace: Union[Workspace, str, pathlib.Path, None] = None): - """Initialize RemoteTriton. - - Args: - url: Triton Inference Server URL in form of ://: - If scheme is not provided, http is used as default. - If port is not provided, 8000 is used as default for http and 8001 for grpc. - workspace: path to be created where the files used by pytriton will be stored - (e.g. socket files for communication). - If workspace is `None` temporary workspace will be created. - Workspace should be created in shared filesystem space between RemoteTriton - and Triton Inference Server to allow access to socket files - (if you use containers, folder must be shared between containers). 
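A brief sketch of connecting to an already running server with the class above; the URL and workspace path are placeholders and, as the docstring notes, the workspace must be reachable from both sides when containers are involved:

    from pytriton.triton import RemoteTriton

    # Assumes a tritonserver instance is already listening at the given address.
    remote = RemoteTriton(url="grpc://triton-host:8001", workspace="/shared/pytriton_workspace")
    with remote:
        # models registered with remote.bind(...) are loaded on connect()
        remote.serve()
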
- - """ - super().__init__(url=TritonUrl.from_url(url).with_scheme, workspace=workspace) - - with self._cv: - self._stopped = False - - def __enter__(self) -> "RemoteTriton": - """Entering the context connects to remote Triton server. - - Returns: - A RemoteTriton object - """ - super().__enter__() - return self diff --git a/stf/stf-api-alternative/pytriton/pytriton/utils/__init__.py b/stf/stf-api-alternative/pytriton/pytriton/utils/__init__.py deleted file mode 100644 index 8010bd32129eb99ce3ce66981b81d3ba41bf287b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/utils/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 diff --git a/stf/stf-api-alternative/pytriton/pytriton/utils/dataclasses.py b/stf/stf-api-alternative/pytriton/pytriton/utils/dataclasses.py deleted file mode 100644 index 2bb86bb71a7ffadf9745452d18f9604a68fbc8fe..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/utils/dataclasses.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Collection of utils for dataclasses.""" - -import wrapt - - -@wrapt.decorator -def kwonly_dataclass(wrapped, instance, args, kwargs): - """Poor dataclass wrapper to have init method keyword-only. - - Dataclass keyword-only arguments are available since Python 3.10. - - Example usage: - - @kwonly_dataclass - @dataclass.dataclasses - class MyDataClass: - a: int - b: str - - my_dataclass = MyDataClass(a=1, b="hello") - MyDataClass(1, "hello") # raises TypeError - """ - if args: - raise TypeError(f"{wrapped.__name__} initialization can't be used with positional arguments") - return wrapped(**kwargs) diff --git a/stf/stf-api-alternative/pytriton/pytriton/utils/distribution.py b/stf/stf-api-alternative/pytriton/pytriton/utils/distribution.py deleted file mode 100644 index 38405a640afa4a0e04fb4b65d0f7fec94ae6ec5a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/utils/distribution.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Set of utils to obtain properties of pytriton distribution.""" -import logging -import pathlib -import site - -LOGGER = logging.getLogger(__name__) - - -def get_root_module_path() -> pathlib.Path: - """Obtain path to pytriton module. - - Returns: - Path to pytriton root module in site or if installed in editable model - local. - """ - pytriton_module_path = pathlib.Path(__file__).parent.parent - LOGGER.debug("Obtained pytriton module path: %s", pytriton_module_path) - return pytriton_module_path - - -def is_editable_install() -> bool: - """Checks if pytriton is installed in editable mode. - - Returns: - True if pytriton is installed in editable mode, False otherwise. - """ - editable_mode = True - site_packages = site.getsitepackages() + [site.getusersitepackages()] - pytriton_module_path = get_root_module_path() - for site_package in site_packages: - try: - pytriton_module_path.relative_to(site_package) - editable_mode = False - break - except ValueError: - pass - LOGGER.debug("pytriton is installed in editable mode: %s", editable_mode) - return editable_mode - - -def get_libs_path(): - """Obtains path to directory with external libraries required by library. - - Returns: - Path to directory with external libraries required by library. - """ - pytriton_module_path = get_root_module_path() - if is_editable_install(): - libs_path = pytriton_module_path / "tritonserver/external_libs" - else: - libs_path = pytriton_module_path.parent / "nvidia_pytriton.libs" - LOGGER.debug("Obtained nvidia_pytriton.libs path: %s", libs_path) - return libs_path - - -def get_stub_path(version: str): - """Obtains path stub file for provided Python interpreter version. - - Args: - version: Python interpreter version - - Returns: - Path to stub file for given Python version - """ - pytriton_module_path = get_root_module_path() - stub_path = pytriton_module_path / "tritonserver" / "python_backend_stubs" / version / "triton_python_backend_stub" - LOGGER.debug("Obtained pytriton stubs path for %s: %s", version, stub_path) - return stub_path diff --git a/stf/stf-api-alternative/pytriton/pytriton/utils/endpoint_utils.py b/stf/stf-api-alternative/pytriton/pytriton/utils/endpoint_utils.py deleted file mode 100644 index 43bb877271a4545c3309bb9319bea576e41b00c4..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/utils/endpoint_utils.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
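The distribution helpers above are what the Triton class uses to locate binaries, libraries and python-backend stubs; a small sketch, assuming a standard wheel installation:

    import sys
    from pytriton.utils.distribution import get_libs_path, get_root_module_path, get_stub_path

    print(get_root_module_path())   # site-packages/pytriton, or the local checkout for editable installs
    print(get_libs_path())          # external libraries dir; location differs for editable vs wheel installs

    python_version = f"{sys.version_info[0]}.{sys.version_info[1]}"
    print(get_stub_path(python_version))
    # .../tritonserver/python_backend_stubs/<version>/triton_python_backend_stub
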
-"""Endpoint url forming utilities module.""" -import re -from typing import Literal - -from pytriton.constants import DEFAULT_GRPC_PORT, DEFAULT_HTTP_PORT, DEFAULT_METRICS_PORT, TRITON_LOCAL_IP -from pytriton.server.triton_server_config import TritonServerConfig - - -def get_endpoint(server_config: TritonServerConfig, endpoint: Literal["http", "grpc", "metrics"]) -> str: - """Get endpoint url. - - Args: - server_config: TritonServerConfig object - endpoint: endpoint name - - Returns: - endpoint url in form of {protocol}://{host}:{port} - """ - protocols = {"http": "http", "grpc": "grpc", "metrics": "http"} - - def _obtain_address(key_names): - for key_name in key_names: - address = server_config[key_name] - if address and not re.match(r"^0+.0+.0+.0+$", address): - break - else: - address = TRITON_LOCAL_IP - - return address - - addresses = { - "http": _obtain_address(["http-address"]), - "grpc": _obtain_address(["grpc-address"]), - "metrics": _obtain_address(["metrics-address", "http-address"]), - } - ports = { - "http": server_config["http-port"] or DEFAULT_HTTP_PORT, - "grpc": server_config["grpc-port"] or DEFAULT_GRPC_PORT, - "metrics": server_config["metrics-port"] or DEFAULT_METRICS_PORT, - } - - return f"{protocols[endpoint]}://{addresses[endpoint]}:{ports[endpoint]}" diff --git a/stf/stf-api-alternative/pytriton/pytriton/utils/logging.py b/stf/stf-api-alternative/pytriton/pytriton/utils/logging.py deleted file mode 100644 index 0869deda8caef62ef7fabfdd3fa13da70a7a79f2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/utils/logging.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Module with logging related utils.""" -import logging - - -def silence_3rd_party_loggers(): - """Silence 3rd party libraries which adds enormous number of log lines on DEBUG level.""" - logging.getLogger("sh.command").setLevel(logging.WARNING) - logging.getLogger("sh.stream_bufferer").setLevel(logging.WARNING) - logging.getLogger("sh.streamreader").setLevel(logging.WARNING) diff --git a/stf/stf-api-alternative/pytriton/pytriton/utils/workspace.py b/stf/stf-api-alternative/pytriton/pytriton/utils/workspace.py deleted file mode 100644 index d04f881f45174f7666c2e2a7a0b0c51f3addfe22..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/pytriton/utils/workspace.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""Workspace class for handling space to store artifacts.""" -import logging -import pathlib -import shutil -import tempfile -from typing import Optional, Union - -LOGGER = logging.getLogger(__name__) - - -class Workspace: - """Class for storing the workspace information.""" - - def __init__(self, workspace_path: Optional[Union[str, pathlib.Path]] = None): - """Initialize workspace in the provided path or create workspace in default location. - - Args: - workspace_path: Path to a directory where workspace has to be created (optional). - If not provided workspace with random name will be created in ~/.cache/pytriton directory. - - Raises: - FileExistsError: in case workspace already exists. - """ - if workspace_path is None: - from pytriton.constants import PYTRITON_HOME - - PYTRITON_HOME.mkdir(parents=True, exist_ok=True) - self._tmp_dir = tempfile.TemporaryDirectory(dir=PYTRITON_HOME, prefix="workspace_") - self._workspace_path = pathlib.Path(self._tmp_dir.name).resolve() - LOGGER.debug(f"Workspace path {self._workspace_path}") - else: - self._tmp_dir = None - self._workspace_path = pathlib.Path(workspace_path).resolve() - LOGGER.debug(f"Workspace path {self._workspace_path}") - self._workspace_path.mkdir(parents=True) - - @property - def path(self) -> pathlib.Path: - """Return path to the workspace. - - Returns: - Path object with location of workspace catalog - """ - return self._workspace_path - - def exists(self) -> bool: - """Verify if workspace catalog exists. - - Returns: - True if workspace catalog exists. False otherwise. - """ - return self._workspace_path.exists() - - def is_empty(self) -> bool: - """Verify if workspace contains any files or folders. - - Returns: - True if workspace is not empty. False otherwise. - """ - all_files = list(self.path.rglob("*")) - if len(all_files) == 0: - return True - for p in all_files: - rel_p = p.relative_to(self.path) - if rel_p.parts and not rel_p.parts[0].startswith("."): - return False - return True - - def clean(self) -> None: - """Clean workspace removing files and directories created inside including the workspace itself. - - Raises: - OSError - when workspace after performing cleanup operation is still not empty. - """ - LOGGER.debug(f"Cleaning workspace dir {self.path}") - - for child in self.path.rglob("*"): - rel_p = child.relative_to(self.path) - if len(rel_p.parts) == 0 or rel_p.parts[0].startswith("."): - continue - if child.is_dir(): - shutil.rmtree(child, ignore_errors=True) - else: - child.unlink() - if not self.is_empty(): - raise OSError(f"Could not clean {self.path} workspace") - if self.path.exists(): - self.path.rmdir() diff --git a/stf/stf-api-alternative/pytriton/scripts/Dockerfile.build b/stf/stf-api-alternative/pytriton/scripts/Dockerfile.build deleted file mode 100644 index 7d8205b70f4432f01ecd799684d9032d8a1893d1..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/scripts/Dockerfile.build +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
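A minimal sketch of the Workspace lifecycle defined above; with no argument a random workspace_* directory is created under the pytriton home directory, as the docstring describes:

    from pytriton.utils.workspace import Workspace

    ws = Workspace()                 # random workspace under ~/.cache/pytriton (PYTRITON_HOME)
    print(ws.path, ws.exists(), ws.is_empty())

    (ws.path / "sockets").mkdir()    # artifacts created during serving would live here
    assert not ws.is_empty()

    ws.clean()                       # removes the contents and the workspace directory itself
    assert not ws.path.exists()
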
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -ARG FROM_IMAGE -FROM ${FROM_IMAGE} - -WORKDIR /opt/workspace - -ENV PYENV_ROOT=/root/.pyenv -ENV PATH=$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH - -RUN apt update -y && apt install -y build-essential \ - cmake \ - make \ - rapidjson-dev \ - libarchive-dev \ - zlib1g-dev \ - libssl-dev \ - libsqlite3-dev \ - libbz2-dev \ - liblzma-dev \ - libffi-dev \ - libreadline-dev - -RUN curl https://pyenv.run | bash - -ADD scripts/build_python_stubs.sh /opt/workspace/build_python_stubs.sh -RUN bash -xe /opt/workspace/build_python_stubs.sh diff --git a/stf/stf-api-alternative/pytriton/scripts/add_libs_to_wheel.sh b/stf/stf-api-alternative/pytriton/scripts/add_libs_to_wheel.sh deleted file mode 100644 index f9f8e686fda46185550b14b0b21ebb4510fa6433..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/scripts/add_libs_to_wheel.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x -set -e - -SCRIPTS_DIR=$(dirname "$(realpath "${BASH_SOURCE[0]}")") - -PYTRITON_IMAGE_NAME="${1}" -TRITON_LOCAL_DIR=$(realpath "${2}") -WHEEL_PATH=$(realpath "${3}") -DIST_DIR="$(dirname "${WHEEL_PATH}")" - -DOCKER_PLATFORM=${4} -# get arch from DOCKER_PLATFORM -DOCKER_ARCH=$(echo "${DOCKER_PLATFORM}" | cut -d'/' -f2) -if [[ "$DOCKER_ARCH" == "amd64" ]]; then - WHEEL_ARCH=x86_64 -elif [[ "$DOCKER_ARCH" == "arm64" ]]; then - WHEEL_ARCH=aarch64 -fi -WHEEL_PLATFORM=manylinux_2_35_${WHEEL_ARCH} - -DOCKER_CONTAINER_ID=$(docker create --rm --platform "${DOCKER_PLATFORM}" -w "${PWD}" "${PYTRITON_IMAGE_NAME}" bash -c "sleep 1h") -docker start "${DOCKER_CONTAINER_ID}" - -docker exec "${DOCKER_CONTAINER_ID}" mkdir -p "${DIST_DIR}" -docker exec "${DOCKER_CONTAINER_ID}" mkdir -p "$(dirname "${TRITON_LOCAL_DIR}")" - -docker cp "${WHEEL_PATH}" "${DOCKER_CONTAINER_ID}:${WHEEL_PATH}" -docker cp "${TRITON_LOCAL_DIR}" "${DOCKER_CONTAINER_ID}:${TRITON_LOCAL_DIR}" - -docker exec "${DOCKER_CONTAINER_ID}" pip install auditwheel==5.3.0 patchelf==0.17.2 -docker cp "${SCRIPTS_DIR}/auditwheel_patched.py" "${DOCKER_CONTAINER_ID}:/tmp/" -docker exec "${DOCKER_CONTAINER_ID}" bash -c "LD_LIBRARY_PATH=${TRITON_LOCAL_DIR}/external_libs /tmp/auditwheel_patched.py -vvvv repair --plat ${WHEEL_PLATFORM} ${WHEEL_PATH}" - -WHEELHOUSE_DIR="$(dirname "${DIST_DIR}")/wheelhouse" -RESULT_WHEEL_PATH=$(docker exec "${DOCKER_CONTAINER_ID}" bash -c "find ${WHEELHOUSE_DIR} -type f -name *.whl") -docker cp "${DOCKER_CONTAINER_ID}:${RESULT_WHEEL_PATH}" "${DIST_DIR}" - -docker stop "${DOCKER_CONTAINER_ID}" diff --git a/stf/stf-api-alternative/pytriton/scripts/auditwheel_patched.py b/stf/stf-api-alternative/pytriton/scripts/auditwheel_patched.py deleted file mode 100644 index 49cd937a2b93fec3cc92aa9121fecab2d273108f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/scripts/auditwheel_patched.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Patched auditwheel script. - -Patch changes behavior of auditwheel to not remove libpython from the wheel -as it is python interpreter library required by python backend. 
-""" -import re - -import auditwheel.main # noqa -import auditwheel.policy.external_references -from auditwheel.policy import _POLICIES as POLICIES - -# to not remove libpython from the wheel as it is python interpreter library required by python backend -# used here: https://github.com/pypa/auditwheel/blob/main/src/auditwheel/policy/external_references.py#L28 -auditwheel.policy.external_references.LIBPYTHON_RE = re.compile(r"__libpython\d\.\d\.\d\.so") - -# Policies to ignore attaching Python libraries to wheel during fixing dependencies -for p in POLICIES: - for version in ["3.8", "3.9", "3.10", "3.11"]: - p["lib_whitelist"].append(f"libpython{version}.so.1.0") - -if __name__ == "__main__": - auditwheel.main.main() diff --git a/stf/stf-api-alternative/pytriton/scripts/build_python_stubs.sh b/stf/stf-api-alternative/pytriton/scripts/build_python_stubs.sh deleted file mode 100644 index 2bac4f40d48ef015b9f29193706339d45d39a67b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/scripts/build_python_stubs.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -set -xe - -if [[ -z ${NVIDIA_TRITON_SERVER_VERSION} ]]; then - echo "The NVIDIA_TRITON_SERVER_VERSION variable is not set." - echo "The script must be executed inside Triton Inference Server - nvcr.io/nvidia/tritonserver:xx.yy-pyt-python-py3" - exit 1 -else - echo "Found NVIDIA_TRITON_SERVER_VERSION=${NVIDIA_TRITON_SERVER_VERSION}." -fi - -export GIT_BRANCH_NAME="r${NVIDIA_TRITON_SERVER_VERSION}" - -PYTHON_VERSION=("3.8" "3.9" "3.10" "3.11") -for version in "${PYTHON_VERSION[@]}"; do - pyenv install -f ${version} -done - -echo "Preparing Python Backend Stubs directory in ${PYTHON_STUBS_DIR}" -PYTHON_STUBS_DIR=${PWD}/python_backend_stubs -if [[ -d "${PYTHON_STUBS_DIR}" ]]; then - echo "Removing existing directory ${PYTHON_STUBS_DIR}" - rm -rf "${PYTHON_STUBS_DIR}" -fi - -echo "Creating new stubs directory ${PYTHON_STUBS_DIR}" -mkdir "${PYTHON_STUBS_DIR}" - -echo "Preparing Python Backend directory" -PYTHON_BACKEND_DIR=${PWD}/python_backend -if [[ -d "${PYTHON_BACKEND_DIR}" ]]; then - echo "Removing existing Python Backend directory ${PYTHON_BACKEND_DIR}" - rm -rf "${PYTHON_BACKEND_DIR}" -fi - -echo "Cloning Python Backend branch ${GIT_BRANCH_NAME} to ${PYTHON_BACKEND_DIR}." 
-git clone https://github.com/triton-inference-server/python_backend -b "${GIT_BRANCH_NAME}" ${PYTHON_BACKEND_DIR} - -for version in "${PYTHON_VERSION[@]}"; do - echo "Building Python Backend Stub for Python version ${version}" - cd "${PYTHON_BACKEND_DIR}" - - echo "Revert the repository state" - git reset --hard && git clean --force -dfx - - echo "Create build directory for Python version ${version}" - mkdir build && cd build - - echo "Initialize Python for version ${version}" - pyenv global "${version}" - python --version - - echo "Preparing build files for Python version ${version}" - cmake -DTRITON_ENABLE_GPU=ON \ - -DTRITON_BACKEND_REPO_TAG="${GIT_BRANCH_NAME}" \ - -DTRITON_COMMON_REPO_TAG="${GIT_BRANCH_NAME}" \ - -DTRITON_CORE_REPO_TAG="${GIT_BRANCH_NAME}" \ - -DCMAKE_INSTALL_PREFIX:PATH="$(pwd)/install" .. - - echo "Building triton_python_backend_stub for Python version ${version}" - make triton-python-backend-stub - ldd triton_python_backend_stub - - CURRENT_STUB_DIR="${PYTHON_STUBS_DIR}/${version}" - echo "Moving stub for Python version ${version} to ${CURRENT_STUB_DIR}" - mkdir "${CURRENT_STUB_DIR}" - - mv triton_python_backend_stub "${CURRENT_STUB_DIR}"/triton_python_backend_stub -done diff --git a/stf/stf-api-alternative/pytriton/scripts/build_triton.sh b/stf/stf-api-alternative/pytriton/scripts/build_triton.sh deleted file mode 100644 index 7f901b7b69fe458d9fe350b494e3e559b4ea04ee..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/scripts/build_triton.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -export TRITON_SERVER_IMAGE=$1 -export PYTRITON_IMAGE_NAME=$2 -export PLATFORM=$3 - -set -x - -# check if docker image name has docker registry prefix to not try to pull development image -PULL_RESULT="1" -if [[ "${PYTRITON_IMAGE_NAME}" == *"/"* && "${PYTRITON_IMAGE_REBUILD}" != "1" ]]; then - docker pull -q --platform "${PLATFORM}" "${PYTRITON_IMAGE_NAME}" - PULL_RESULT=$? -fi - -# fetch base image earlier as in some environments there are issues with pulling base images while building -docker pull -q --platform "${PLATFORM}" "${TRITON_SERVER_IMAGE}" - -if [[ "${PULL_RESULT}" != "0" ]]; then - docker buildx build --force-rm \ - --platform "${PLATFORM}" \ - --build-arg FROM_IMAGE="${TRITON_SERVER_IMAGE}" \ - --file scripts/Dockerfile.build \ - --tag "${PYTRITON_IMAGE_NAME}" ${DOCKER_BUILD_ADDITIONAL_FLAGS} . -fi \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/scripts/build_wheel.sh b/stf/stf-api-alternative/pytriton/scripts/build_wheel.sh deleted file mode 100644 index 827265aafa8d785c0a81560c5eefdbc3cbeabd78..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/scripts/build_wheel.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -DOCKER_PLATFORM=${1} - -WHEEL_ARCH=$(echo ${DOCKER_PLATFORM} | sed -e 's/^linux\/amd64$/linux_x86_64/g' -e 's/^linux\/arm64$/linux_aarch64/g') -python3 -m build --wheel -C="--build-option=--plat-name" -C="--build-option=${WHEEL_ARCH}" . -python3 -m build --sdist . diff --git a/stf/stf-api-alternative/pytriton/scripts/extract_triton.sh b/stf/stf-api-alternative/pytriton/scripts/extract_triton.sh deleted file mode 100644 index 657d02f3980f0519d8234b0920dfe10f60e2c2ba..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/scripts/extract_triton.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x - -export PYTRITON_IMAGE_NAME=$1 -export TARGET_DIR=$2 -export PLATFORM=$3 - -rm -rf "${TARGET_DIR}" - -export PYTRITON_CONTAINER_ID=$(docker create --platform "$PLATFORM" --rm "${PYTRITON_IMAGE_NAME}" bash -c "sleep 1h") -docker start "${PYTRITON_CONTAINER_ID}" - -mkdir -p "${TARGET_DIR}"/backends/python -mkdir -p "${TARGET_DIR}"/caches/local -docker cp "${PYTRITON_CONTAINER_ID}":/opt/tritonserver/bin "${TARGET_DIR}" -docker cp "${PYTRITON_CONTAINER_ID}":/opt/tritonserver/lib "${TARGET_DIR}/external_libs" -docker cp "${PYTRITON_CONTAINER_ID}":/opt/tritonserver/caches/local/libtritoncache_local.so "${TARGET_DIR}/caches/local" -docker cp "${PYTRITON_CONTAINER_ID}":/opt/tritonserver/backends/python/libtriton_python.so "${TARGET_DIR}"/backends/python -docker cp "${PYTRITON_CONTAINER_ID}":/opt/tritonserver/backends/python/triton_python_backend_utils.py "${TARGET_DIR}"/backends/python -docker cp "${PYTRITON_CONTAINER_ID}:/opt/workspace/python_backend_stubs" "${TARGET_DIR}" - -mkdir -p "${TARGET_DIR}"/external_libs -function extract_binary_dependencies() { - BINARY_PATH="${1}" - export BINARY_PATH - echo "==== Extracting dependencies of ${BINARY_PATH}" - DEPS_SYMLINKS=$(docker exec -e BINARY_PATH "${PYTRITON_CONTAINER_ID}" bash -c 'ldd ${BINARY_PATH} | awk "/=>/ {print \$3}" | sort -u | xargs realpath -s | sed "s/,\$/\n/"') - for DEP in ${DEPS_SYMLINKS} - do - docker cp "${PYTRITON_CONTAINER_ID}:${DEP}" "${TARGET_DIR}/external_libs" - done - DEPS_REALPATH=$(docker exec -e BINARY_PATH "${PYTRITON_CONTAINER_ID}" bash -c 'ldd ${BINARY_PATH} | awk "/=>/ {print \$3}" | sort -u | xargs realpath | sed "s/,\$/\n/"') - for DEP in ${DEPS_REALPATH} - do - docker cp "${PYTRITON_CONTAINER_ID}:${DEP}" "${TARGET_DIR}/external_libs" - done -} - -extract_binary_dependencies /opt/tritonserver/bin/tritonserver -extract_binary_dependencies /opt/tritonserver/lib/libtritonserver.so -extract_binary_dependencies /opt/tritonserver/caches/local/libtritoncache_local.so -extract_binary_dependencies /opt/tritonserver/backends/python/libtriton_python.so -extract_binary_dependencies /opt/tritonserver/backends/python/triton_python_backend_stub - -docker stop "${PYTRITON_CONTAINER_ID}" diff --git a/stf/stf-api-alternative/pytriton/scripts/rewrite_links_to_repo.py b/stf/stf-api-alternative/pytriton/scripts/rewrite_links_to_repo.py deleted file mode 100644 index ad0abe71d0a3f150d063ab8d1bd26cedc866ffc3..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/scripts/rewrite_links_to_repo.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Script with hook for replacing ../ markdown links in docs to git repo.""" -import logging -import os -import pathlib -import re - - -def on_page_markdown(markdown: str, config, **kwargs): - """Called on markdown content.""" - logging.basicConfig(level=logging.INFO, format="%(levelname)s\t - %(name)s: %(message)s") - logger = logging.getLogger("scripts.rewrite_links_to_repo") - - ref = _get_current_ref() - - def _replace(_md_path): - repo_url = config["repo_url"] - view_uri_template = config["view_uri_template"] - path = pathlib.Path("docs") / _md_path - path = path.resolve().relative_to(pathlib.Path.cwd()) - full_url = f"{repo_url}/{view_uri_template.format(ref=ref, path=path)}" - return full_url - - for md_path in _extract_external_link(markdown): - logger.info(f"replacing {md_path} -> {_replace(md_path)}") - markdown = markdown.replace(md_path, _replace(md_path)) - - return markdown - - -def _get_current_ref(): - ref = os.environ.get("CI_COMMIT_REF_NAME", None) - if ref is None: - import git - - try: - repo = git.Repo(".") - ref = repo.active_branch.name - except (git.InvalidGitRepositoryError, TypeError): - # TypeError thrown on non detached head - no active branch - ref = "main" - return ref - - -def _extract_external_link(markdown): - external_paths = [path_with_brackets[1:-1] for path_with_brackets in re.findall(r"\(\.\.\/.*\)", markdown)] - return list(set(external_paths)) diff --git a/stf/stf-api-alternative/pytriton/tests/__init__.py b/stf/stf-api-alternative/pytriton/tests/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test.py deleted file mode 100644 index 6bc50c746b58970c8a98117359a3cfa35b37e758..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test metadata""" - -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test.sh deleted file mode 100644 index 41cddbefe31d20d8ccd26f11c76cc344596ca0f9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" - -pip install pytest-timeout numpy -pytest -svvv \ - --log-cli-level=DEBUG \ - --log-cli-format='%(asctime)s [%(levelname)s] [%(process)d:%(thread)d] %(name)s:%(lineno)d: %(message)s' \ - --timeout=25 \ - ${THIS_SCRIPT_DIR}/test_pytest.py \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test_pytest.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test_pytest.py deleted file mode 100644 index b440b330a031da124cc295126b7d02b88a6fbaa6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_decoupled_mode/test_pytest.py +++ /dev/null @@ -1,367 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of decoupled mode""" - -import contextlib -import logging -import socket -import time - -import numpy as np -import pytest - -from pytriton.client import DecoupledModelClient, ModelClient -from pytriton.client.exceptions import ( - PyTritonClientInferenceServerError, - PyTritonClientTimeoutError, - PyTritonClientValueError, -) -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -_LOGGER = logging.getLogger(__name__) - -_SMALL_TIMEOUT = 0.1 -_GARGANTUAN_TIMEOUT = 10.0 -_WRONG_TIMEOUT = -1.0 -_CORRECT_REPEAT = 2 -_WRONG_REPEAT = -2 - - -@pytest.fixture(scope="function") -def find_free_ports(): - """Fixture to find free ports for grpc, http, and metrics""" - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as grpc: - grpc.bind(("", 0)) - grpc.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as http: - http.bind(("", 0)) - http.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as metrics: - metrics.bind(("", 0)) - metrics.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - yield { - "grpc_port": grpc.getsockname()[1], - "http_port": http.getsockname()[1], - "metrics_port": metrics.getsockname()[1], - } - - -class TritonInstance: - - """Context manager to hold Triton instance and ports""" - - def __init__(self, grpc_port, http_port, metrics_port, model_name, infer_function, decoupled=True): - self.grpc_port = grpc_port - self.http_port = http_port - self.metrics_port = metrics_port - self.model_name = model_name - self.config = TritonConfig(http_port=http_port, grpc_port=grpc_port, metrics_port=metrics_port) - self.infer_function = infer_function - self.grpc_url = f"grpc://localhost:{self.grpc_port}" - self.http_url = f"http://localhost:{self.http_port}" - self.decoupled = decoupled - - def __enter__(self): - try: - _LOGGER.info("Checking if Triton server is already running.") - if self.decoupled: - DecoupledModelClient( - self.grpc_url, - self.model_name, - init_timeout_s=_SMALL_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - lazy_init=False, - ) - else: - ModelClient( - self.grpc_url, - self.model_name, - init_timeout_s=_SMALL_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - lazy_init=False, - ) - message = "Triton server already running."
- _LOGGER.error(message) - raise RuntimeError(message) - except PyTritonClientTimeoutError: - _LOGGER.debug("Triton server not running.") - pass - self.triton = Triton(config=self.config) - _LOGGER.debug(f"Binding {self.model_name} model.") - self.triton.bind( - model_name=self.model_name, - infer_func=self.infer_function, - inputs=[ - Tensor(dtype=np.float64, shape=(-1,)), - Tensor(dtype=np.int64, shape=(-1,)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1,)), - Tensor(dtype=np.int64, shape=(-1,)), - ], - config=ModelConfig(decoupled=self.decoupled), - strict=True, - ) - _LOGGER.info("Running Triton server.") - self.triton.run() - return self - - def __exit__(self, exc_type, exc_value, traceback): - _LOGGER.debug("Triton server stopping.") - self.triton.stop() - _LOGGER.debug("Checking if Triton server is still running.") - while True: - try: - with ModelClient( - self.http_url, - self.model_name, - init_timeout_s=_SMALL_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - lazy_init=False, - ) as client: - _LOGGER.info(f"Triton server still running. {client.model_config}") - except PyTritonClientTimeoutError: - _LOGGER.debug("Triton server not running.") - break - _LOGGER.debug(f"Triton server still alive, so sleeping for {_SMALL_TIMEOUT}s.") - time.sleep(_SMALL_TIMEOUT) - _LOGGER.info("Triton server stopped.") - - -@pytest.fixture(scope="function") -def triton_decoupled_server(find_free_ports): - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - sleep_time = inputs["INPUT_1"].squeeze().item() - response_counter = inputs["INPUT_2"].squeeze().item() - if sleep_time < 0: - raise ValueError("Sleep time must be positive.") - if response_counter < 0: - response_counter = -response_counter - _LOGGER.info(f"Sleeper will raise ValueError after {response_counter} responses.") - for _ in range(response_counter): - _LOGGER.info(f"Will sleep {sleep_time}s") - time.sleep(sleep_time) - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - "OUTPUT_2": inputs["INPUT_2"], - } - _LOGGER.debug(f"Yield value {return_value}") - yield return_value - time.sleep(sleep_time) - _LOGGER.info(f"Will sleep {sleep_time}s") - raise ValueError("Response counter must be positive.") - else: - _LOGGER.info(f"Sleeper will succeed after {response_counter} responses.") - for _ in range(response_counter): - _LOGGER.info(f"Will sleep {sleep_time}s") - time.sleep(sleep_time) - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - "OUTPUT_2": inputs["INPUT_2"], - } - _LOGGER.debug(f"Yield value {return_value}") - yield return_value - - _LOGGER.debug(f"Using ports: grpc={find_free_ports}") - with TritonInstance(**find_free_ports, model_name="Sleeper", infer_function=_infer_fn, decoupled=True) as triton: - yield triton - - -@pytest.fixture(scope="function") -def triton_coupled_server(find_free_ports): - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - sleep_time = inputs["INPUT_1"].squeeze().item() - if sleep_time < 0: - raise ValueError("Sleep time must be positive.") - _LOGGER.info(f"Will sleep {sleep_time}s") - time.sleep(sleep_time) - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - "OUTPUT_2": inputs["INPUT_2"], - } - _LOGGER.debug(f"Yield value {return_value}") - return return_value - - _LOGGER.debug(f"Using ports: grpc={find_free_ports}") - with TritonInstance(**find_free_ports, model_name="Sleeper", infer_function=_infer_fn, decoupled=False) as triton: - yield triton - - -@pytest.fixture(scope="function") -def
grpc_decoupled_client_server(triton_decoupled_server): - _LOGGER.debug( - f"Preparing client for {triton_decoupled_server.grpc_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - yield DecoupledModelClient( - url=triton_decoupled_server.grpc_url, - model_name=triton_decoupled_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -@pytest.fixture(scope="function") -def grpc_decoupled_client_coupled_server(triton_coupled_server): - _LOGGER.debug( - f"Preparing client for {triton_coupled_server.grpc_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - yield DecoupledModelClient( - url=triton_coupled_server.grpc_url, - model_name=triton_coupled_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -@pytest.fixture(scope="function") -def grpc_coupled_client_decoupled_server(triton_decoupled_server): - _LOGGER.debug( - f"Preparing client for {triton_decoupled_server.grpc_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - yield ModelClient( - url=triton_decoupled_server.grpc_url, - model_name=triton_decoupled_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -@pytest.fixture(scope="function") -def http_coupled_client_decoupled_server(triton_decoupled_server): - _LOGGER.debug( - f"Preparing client for {triton_decoupled_server.grpc_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - yield ModelClient( - url=triton_decoupled_server.http_url, - model_name=triton_decoupled_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -def test_coupled_infer_sample_failure_http(grpc_coupled_client_decoupled_server): - with pytest.raises(PyTritonClientInferenceServerError): - with grpc_coupled_client_decoupled_server as client: - client.infer_sample(np.array([_SMALL_TIMEOUT]), np.array([1])) - - -def test_coupled_infer_batch_failure_http(grpc_coupled_client_decoupled_server): - with pytest.raises(PyTritonClientInferenceServerError): - with grpc_coupled_client_decoupled_server as client: - client.infer_batch(np.array([[_SMALL_TIMEOUT]]), np.array([[1]])) - - -def test_decoupled_init_failure_http(triton_decoupled_server): - with pytest.raises(PyTritonClientValueError): - DecoupledModelClient( - url=triton_decoupled_server.http_url, - model_name=triton_decoupled_server.model_name, - ) - - -def test_decoupled_infer_sample_success_grpc(grpc_decoupled_client_server): - with grpc_decoupled_client_server as client: - responses = list(client.infer_sample(np.array([_SMALL_TIMEOUT]), np.array([_CORRECT_REPEAT]))) - assert len(responses) == 2 - assert responses[0]["OUTPUT_1"] == _SMALL_TIMEOUT - assert responses[0]["OUTPUT_2"] == _CORRECT_REPEAT - assert responses[1]["OUTPUT_1"] == _SMALL_TIMEOUT - assert responses[1]["OUTPUT_2"] == _CORRECT_REPEAT - - -def test_decoupled_infer_batch_success_grpc(grpc_decoupled_client_server): - with grpc_decoupled_client_server as client: - responses = list(client.infer_batch(np.array([[_SMALL_TIMEOUT]]), np.array([[_CORRECT_REPEAT]]))) - assert len(responses) == 2 - assert responses[0]["OUTPUT_1"] == _SMALL_TIMEOUT - assert responses[0]["OUTPUT_2"] == _CORRECT_REPEAT - assert responses[1]["OUTPUT_1"] == _SMALL_TIMEOUT - assert responses[1]["OUTPUT_2"] == _CORRECT_REPEAT - - -def 
test_decoupled_infer_sample_failure_coupled_server_grpc(grpc_decoupled_client_coupled_server): - with grpc_decoupled_client_coupled_server as client: - with pytest.raises(PyTritonClientInferenceServerError): - client.infer_sample(np.array([_SMALL_TIMEOUT]), np.array([_CORRECT_REPEAT])) - - -def test_decoupled_infer_batch_failure_coupled_server_grpc(grpc_decoupled_client_coupled_server): - with grpc_decoupled_client_coupled_server as client: - with pytest.raises(PyTritonClientInferenceServerError): - client.infer_batch(np.array([[_SMALL_TIMEOUT]]), np.array([[_CORRECT_REPEAT]])) - - -def test_decoupled_infer_sample_fast_failure_no_iter_grpc(grpc_decoupled_client_server): - with grpc_decoupled_client_server as client: - client.infer_sample(np.array([_WRONG_TIMEOUT]), np.array([_CORRECT_REPEAT])) - - -def test_decoupled_infer_sample_fast_failure_iter_grpc(grpc_decoupled_client_server): - with grpc_decoupled_client_server as client: - with pytest.raises(PyTritonClientInferenceServerError): - list(client.infer_sample(np.array([_WRONG_TIMEOUT]), np.array([_CORRECT_REPEAT]))) - - -def test_decoupled_infer_sample_double_infer_grpc(grpc_decoupled_client_server): - with grpc_decoupled_client_server as client: - with pytest.raises(PyTritonClientInferenceServerError): - client.infer_sample(np.array([_SMALL_TIMEOUT]), np.array([_CORRECT_REPEAT])) - client.infer_sample(np.array([_SMALL_TIMEOUT]), np.array([_CORRECT_REPEAT])) - - -def test_decoupled_infer_batch_fine_after_failure(grpc_decoupled_client_server): - with grpc_decoupled_client_server as client: - with pytest.raises(PyTritonClientInferenceServerError): - list(client.infer_batch(np.array([_WRONG_TIMEOUT]), np.array([_CORRECT_REPEAT]))) - responses = list(client.infer_batch(np.array([[_SMALL_TIMEOUT]]), np.array([[_CORRECT_REPEAT]]))) - assert len(responses) == 2 - assert responses[0]["OUTPUT_1"] == _SMALL_TIMEOUT - assert responses[0]["OUTPUT_2"] == _CORRECT_REPEAT - assert responses[1]["OUTPUT_1"] == _SMALL_TIMEOUT - assert responses[1]["OUTPUT_2"] == _CORRECT_REPEAT - - -def test_decoupled_infer_sample_close_before_stream_ends(grpc_decoupled_client_server): - grpc_decoupled_client_server.infer_sample(np.array([_SMALL_TIMEOUT]), np.array([_CORRECT_REPEAT])) - grpc_decoupled_client_server.close() - - -def test_decoupled_infer_sample_close_before_stream_fails_instantly(grpc_decoupled_client_server): - grpc_decoupled_client_server.infer_sample(np.array([_WRONG_TIMEOUT]), np.array([_CORRECT_REPEAT])) - grpc_decoupled_client_server.close() - - -def test_decoupled_infer_sample_close_before_stream_fails_in_stream(grpc_decoupled_client_server): - grpc_decoupled_client_server.infer_sample(np.array([_SMALL_TIMEOUT]), np.array([_WRONG_REPEAT])) - grpc_decoupled_client_server.close() - - -def test_decoupled_infer_sample_slow_failure_iter_grpc(grpc_decoupled_client_server): - with grpc_decoupled_client_server as client: - iterator = client.infer_sample(np.array([_SMALL_TIMEOUT]), np.array([_WRONG_REPEAT])) - first_result = next(iterator) - assert first_result["OUTPUT_1"] == _SMALL_TIMEOUT - assert first_result["OUTPUT_2"] == _WRONG_REPEAT - second_result = next(iterator) - assert second_result["OUTPUT_1"] == _SMALL_TIMEOUT - assert second_result["OUTPUT_2"] == _WRONG_REPEAT - with pytest.raises(PyTritonClientInferenceServerError): - next(iterator) diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/__init__.py deleted file mode 
100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/test.py deleted file mode 100644 index 1a84ca746c87293e6f78e518909d421cca9b7c78..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/test.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of add_sub_python example""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def verify_client_output(client_output): - output1_match = "add: [[2.0], [2.0]]" in client_output - output2_match = "sub: [[0.0], [0.0]]" in client_output - if not (output1_match and output2_match): - raise ValueError("Couldn't find expected result") - else: - LOGGER.info("Results matches expected results") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/add_sub_python/README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/add_sub_python/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/add_sub_python/server.py"] - client_cmd = ["python", "examples/add_sub_python/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/test.py deleted file mode 100644 index 3ac17c5040e803da29dae1828e3b9845db3629b6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/test.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of add_sub_python_with_optionals example""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - -expected_results = [ - ["add: [[4.0], [4.0]]", "sub: [[2.0], [2.0]]"], - ["add: [[6.0], [6.0]]", "sub: [[4.0], [4.0]]"], - ["add: [[7.0], [7.0]]", "sub: [[5.0], [5.0]]"], - ["add: [[9.0], [9.0]]", "sub: [[7.0], [7.0]]"], -] - - -def verify_client_output(client_output): - res = client_output.split("Received inference responses") - chunks = res[-1].split("------------------------")[:-1] - - if len(chunks) != len(expected_results): - raise ValueError("Couldn't find expected result") - - for out_chunk, expecte_res in zip(chunks, expected_results): - for expected in expecte_res: - if expected not in out_chunk: - raise ValueError("Couldn't find expected result") - - LOGGER.info("Results matches expected results") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/add_sub_python_with_optional/README.md", docker_image_with_name - ) - - install_cmd = ["bash", "examples/add_sub_python_with_optional/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/add_sub_python_with_optional/server.py"] - client_cmd = ["python", "examples/add_sub_python_with_optional/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/test.sh 
b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/test.sh deleted file mode 100644 index 5590e9cd136b723d11306b24a2ea709c69de6aa5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_add_sub_python_with_optionals/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/__init__.py deleted file mode 100644 index dbfe137c14d6287870b58c675131cc1d4284c683..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/test.py deleted file mode 100644 index 5dc2d81bbc944a4b1583170051580ca6f309beb1..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/test.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "shared_memory_size_mb": 256, -} - - -def main(): - parser = argparse.ArgumentParser(description="Test for dali_resnet101_pytorch example.") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/dali_resnet101_pytorch/README.md", docker_image_with_name) - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/dali_resnet101_pytorch/server.py"] - client_cmd = ["python", "examples/dali_resnet101_pytorch/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/test.sh deleted file mode 100644 index 251f16479662f370f2e77281be251c51ed65f285..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_dali_resnet101_pytorch/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/test.py deleted file mode 100644 index 414612522fba4d7bb1a3206edb740aedc5bffb9d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/test.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of huggingface_bart_pytorch example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def verify_client_output(client_output): - expected_pattern = r"label: \[\[b'travel'\], \[b'cooking'\], \[b'festival'\], \[b'literature'\]\]" - output_match = re.search(expected_pattern, client_output, re.MULTILINE) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f"Could not find {expected_pattern} in client output") - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/huggingface_bart_pytorch//README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/huggingface_bart_pytorch/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/huggingface_bart_pytorch/server.py", "--verbose"] - client_cmd = ["python", "examples/huggingface_bart_pytorch/client.py", "--verbose"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bart_pytorch/test.sh +++ /dev/null @@ 
-1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/test.py deleted file mode 100644 index 59cea25c8b84772f29822d75bb6efa251ba7b82d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/test.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of huggingface_bert_jax example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", -} - - -def verify_client_output(client_output): - expected_pattern = r"last_hidden_state: \[\[\[-0.11.+,-0.19.*\]\]\]" - output_match = re.search(expected_pattern, client_output, re.MULTILINE) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f"Could not find {expected_pattern} in client output") - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/huggingface_bert_jax//README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/huggingface_bert_jax/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/huggingface_bert_jax/server.py"] - client_cmd = ["python", "examples/huggingface_bert_jax/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/test.sh deleted file mode 100644 index e187a715344ce72f67a3179273a07359107f599b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_bert_jax/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. 
All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/test.py deleted file mode 100644 index cf70f75a409ed33d50921d6f02a8723306100aad..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/test.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of huggingface_dialogpt_streaming_pytorch example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def verify_client_output(client_output): - expected_pattern = r"I'm not sure if you're being sarcastic" - output_match = re.search(expected_pattern, client_output, re.DOTALL) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f'Could not find "{expected_pattern}" in client output') - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/huggingface_dialogpt_streaming_pytorch/README.md", docker_image_with_name - ) - - install_cmd = ["bash", "examples/huggingface_dialogpt_streaming_pytorch/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = [ - "python", - "examples/huggingface_dialogpt_streaming_pytorch/server.py", - "--model-name", - "microsoft/dialoGPT-small", - ] - client_cmd = [ - "python", - "examples/huggingface_dialogpt_streaming_pytorch/client.py", - "--model-name", - "dialoGPT-small", - ] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/test.sh deleted file mode 100644 index 
3543f1d2b8a87f2f4876dcd7d169897b646da68f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_dialogpt_streaming_pytorch/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/test.py deleted file mode 100644 index 6966e4358717e107c56a1e4be2e623f99accfd12..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/test.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of huggingface_opt_multinode_jax example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", -} - - -def verify_client_output(client_output): - expected_pattern = r"output: \['The capital of Poland is a'\]" - output_match = re.search(expected_pattern, client_output, re.MULTILINE) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f"Could not find {expected_pattern} in client output") - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/huggingface_opt_multinode_jax//README.md", docker_image_with_name - ) - - install_cmd = ["bash", "examples/huggingface_opt_multinode_jax/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/huggingface_opt_multinode_jax/server.py", "--model-name", "facebook/opt-125m"] - client_cmd = [ - "python", - "examples/huggingface_opt_multinode_jax/client.py", - "--input", - "The capital of Poland is", - "--output-length", - "1", - ] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/test.sh deleted file mode 100644 index e187a715344ce72f67a3179273a07359107f599b..0000000000000000000000000000000000000000 --- 
a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_opt_multinode_jax/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/test.py deleted file mode 100644 index 932759f603f96da3f6cc5d5adc00b3856248e740..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/test.py +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of huggingface_resnet_pytorch example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def verify_client_output(client_output): - expected_pattern = r"Last result: \{'label': array\(\[b'tiger cat'\], dtype=object\)\}" - output_match = re.search(expected_pattern, client_output, re.MULTILINE) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f"Could not find {expected_pattern} in client output") - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/huggingface_resnet_pytorch//README.md", docker_image_with_name - ) - - install_cmd = ["bash", "examples/huggingface_resnet_pytorch/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/huggingface_resnet_pytorch/server.py"] - client_cmd = ["python", "examples/huggingface_resnet_pytorch/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_resnet_pytorch/test.sh +++ /dev/null @@ -1,22 +0,0 @@ 
-#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/test.py deleted file mode 100644 index 8c4ce1f21edcb96cc5bb2d8bdba9a69fd364253f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/test.py +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of huggingface_stable_diffusion_pytorch example""" -import argparse -import logging -import pathlib -import signal -import sys -import tempfile -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def verify_client_output(results_path): - file_path = results_path / "1" / "image.jpeg" - if not file_path.exists(): - raise ValueError(f"Unable to find the generated image at {file_path}.") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/huggingface_stable_diffusion//README.md", docker_image_with_name - ) - - install_cmd = ["bash", "examples/huggingface_stable_diffusion/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - with tempfile.TemporaryDirectory() as tempdir: - results_path = pathlib.Path(tempdir) - server_cmd = [ - "python", - "examples/huggingface_stable_diffusion/server.py", - ] - client_cmd = [ - "python", - "examples/huggingface_stable_diffusion/client.py", - "--results-path", - results_path.as_posix(), - ] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [ - 0, - -2, - ]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - verify_client_output(results_path) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_huggingface_stable_diffusion_pytorch/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022,
NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/test.py deleted file mode 100644 index 249db819e5163b15ffa53f5c280fdb499381012b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/test.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of identity_python example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def verify_client_output(client_output): - input1_match = re.search(r"INPUT_1: (.*)", client_output, re.MULTILINE) - input2_match = re.search(r"INPUT_2: (.*)", client_output, re.MULTILINE) - output1_match = re.search(r"OUTPUT_1: (.*)", client_output, re.MULTILINE) - output2_match = re.search(r"OUTPUT_2: (.*)", client_output, re.MULTILINE) - input1_array = input1_match.group(1) if input1_match else None - input2_array = input2_match.group(1) if input2_match else None - output1_array = output1_match.group(1) if output1_match else None - output2_array = output2_match.group(1) if output2_match else None - if not input1_array or input1_array != output1_array: - raise ValueError(f"input1_array: {input1_array} differs from output1_array: {output1_array}") - if not input2_array or input2_array != output2_array: - raise ValueError(f"input2_array: {input2_array} differs from output2_array: {output2_array}") - LOGGER.info("Input and output arrays matches") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/identity_python//README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/identity_python/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/identity_python/server.py"] - client_cmd = ["python", "examples/identity_python/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - 
- -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_identity_python/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/test.py deleted file mode 100644 index 3d4cdaffc7c5446d5ce4d58b7128a0dafc3e72d6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/test.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of linear_cupy example""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def verify_client_output(client_output): - output1_match = ( - "result: [[3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0], [3.0, 4.0, 5.0, 6.0, 7.0, 8.0, " - "9.0, 10.0, 11.0, 12.0]]" in client_output - ) - if not output1_match: - raise ValueError("Couldn't find expected result") - else: - LOGGER.info("Results matches expected results") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/linear_cupy/README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/linear_cupy/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/linear_cupy/server.py"] - client_cmd = ["python", "examples/linear_cupy/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_cupy/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/test.py deleted file mode 100644 index d98ef49f750f17f73c63819733fc94255d3f6d22..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/test.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of linear_random_pytorch example""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/linear_random_pytorch/README.md", docker_image_with_name) - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/linear_random_pytorch/server.py"] - client_cmd = ["python", "examples/linear_random_pytorch/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_linear_random_pytorch/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/test.py deleted file mode 100644 index 7dfd21e7f13b25cfa60f55080e5ee0ab71ce7890..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/test.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of mlp_random_tensorflow2 example""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", - "shared_memory_size_mb": 256, -} - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/mlp_random_tensorflow2/README.md", docker_image_with_name) - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/mlp_random_tensorflow2/server.py"] - client_cmd = ["python", "examples/mlp_random_tensorflow2/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/test.sh deleted file mode 100644 index e187a715344ce72f67a3179273a07359107f599b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_mlp_random_tensorflow2/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/test.py deleted file mode 100644 index 2b0ec2511ef3e0c2e916bb9c38f6ea26c3e8b608..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/test.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of multi_instance_resnet50_pytorch example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "shared_memory_size_mb": 256, -} - - -def verify_client_output(client_output): - expected_patterns = [ - "Request concurrency: 64", - r"Pass \[1\] throughput: \d+.\d+ infer/sec\. Avg latency: \d+ usec \(std \d+ usec\)", - r"Pass \[2\] throughput: \d+.\d+ infer/sec\. Avg latency: \d+ usec \(std \d+ usec\)", - r"Pass \[3\] throughput: \d+.\d+ infer/sec\.
Avg latency: \d+ usec \(std \d+ usec\)", - ] - - for expected_pattern in expected_patterns: - output_match = re.search(expected_pattern, client_output, re.MULTILINE) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f"Could not find {expected_pattern} in client output") - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - -def main(): - from pytriton.client import ModelClient - - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/multi_instance_resnet50_pytorch/README.md", docker_image_with_name - ) - - install_cmd = ["bash", "examples/multi_instance_resnet50_pytorch/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/multi_instance_resnet50_pytorch/server.py"] - client_cmd = ["bash", "examples/multi_instance_resnet50_pytorch/client.sh"] - - with ScriptThread(server_cmd, name="server") as server_thread: - ModelClient("localhost", "ResNet50").wait_for_model(timeout_s=args.timeout_s) - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - LOGGER.warning(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/test.sh deleted file mode 100644 index 5590e9cd136b723d11306b24a2ea709c69de6aa5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multi_instance_resnet50_pytorch/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/test.py deleted file mode 100644 index 730cc6955f0670e003771c3a11f260ac3ae9eade..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/test.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of multiple_models_python example""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def verify_client_output(client_output): - output2_match = "Multiply2/product: [[2.0], [2.0]]" in client_output - output4_match = "Multiply4/product: [[4.0], [4.0]]" in client_output - if not (output2_match and output4_match): - raise ValueError("Couldn't find expected result") - else: - LOGGER.info("Results matches expected results") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/multiple_models_python/README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/multiple_models_python/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/multiple_models_python/server.py"] - client_cmd = ["python", "examples/multiple_models_python/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_multiple_models_example/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/test.py deleted file mode 100644 index b34d9378dc8f79d38e43ddeaacf8d14868d27105..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/test.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of nemo_megatron_gpt_multinode example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/nemo:23.06", -} - - -def verify_client_output(client_output): - expected_pattern = r"1 2 3 4" - output_match = re.search(expected_pattern, client_output, re.MULTILINE) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f"Could not find {expected_pattern} in client output") - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/nemo_megatron_gpt_multinode/README.md", docker_image_with_name - ) - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/nemo_megatron_gpt_multinode/server.py"] - client_cmd = ["python", "examples/nemo_megatron_gpt_multinode/client.py", "--prompts", "1 2 3"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/test.sh deleted file mode 100644 index e187a715344ce72f67a3179273a07359107f599b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_nemo_megatron_gpt/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/test.py deleted file mode 100644 index 2230d4e54153bab7cb31bcc808497ebd8eaae2e2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/test.py +++ /dev/null @@ -1,129 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of online_learning_mnist example""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def get_accuracy(client_output): - chunks = client_output.split("Accuracy:") - if len(chunks) > 1: - chunks = chunks[-1].split("(") - chunks = chunks[-1].split("%") - accuracy = float(chunks[0].strip()) - return accuracy - else: - raise ValueError("Couldn't find accuracy in client output") - - -def check_client_closed_properly(client_thread, timeout): - if timeout: - raise ValueError("Client thread timed out") - if client_thread.is_alive(): - raise ValueError("Client thread is still alive") - if client_thread.returncode != 0: - raise ValueError("Client thread exited with non-zero exit code") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/online_learning_mnist/README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/online_learning_mnist/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/online_learning_mnist/server.py"] - client_train_cmd = ["python", "examples/online_learning_mnist/client_train.py"] - client_infer_cmd = ["python", "examples/online_learning_mnist/client_infer.py", "--iter", "1"] - - with ScriptThread(server_cmd, name="server") as server_thread: - try: - with ScriptThread(client_infer_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - check_client_closed_properly(client_thread, elapsed_s > args.timeout_s) - acc = get_accuracy(client_thread.output) - if acc > 40: - raise ValueError("Accuracy should be close to 0.0") - LOGGER.info("Accuracy before training ok: %s", acc) - - with ScriptThread(client_train_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - check_client_closed_properly(client_thread, elapsed_s > args.timeout_s) - LOGGER.info("Training finished.") - - with ScriptThread(client_infer_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - check_client_closed_properly(client_thread, elapsed_s > args.timeout_s) - - acc = get_accuracy(client_thread.output) - if acc < 90: - raise 
ValueError("Accuracy should be close to 100") - LOGGER.info("Accuracy after training ok: %s", acc) - - finally: - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise ValueError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/test.sh deleted file mode 100644 index ab0c8694b690105458b328ef5e36fbf68c90b5e5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_online_learning/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 600 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/test.py deleted file mode 100644 index baf61b4d350fcf013516069f8a01dc93bb234b20..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/test.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of perf_analyzer example""" -import argparse -import logging -import re -import signal -import sys -import time -from multiprocessing.util import DEFAULT_LOGGING_FORMAT - -from tests.utils import ( - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def verify_client_output(client_output): - expected_patterns = [ - "Inferences/Second vs. Client Average Batch Latency", - r"Concurrency: 4, throughput: \d+.\d+ infer/sec, latency \d+ usec", - r"Concurrency: 8, throughput: \d+.\d+ infer/sec, latency \d+ usec", - r"Concurrency: 12, throughput: \d+.\d+ infer/sec, latency \d+ usec", - r"Concurrency: 16, throughput: \d+.\d+ infer/sec, latency \d+ usec", - ] - - for expected_pattern in expected_patterns: - output_match = re.search(expected_pattern, client_output, re.MULTILINE) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f"Could not find {expected_pattern} in client output") - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - -def main(): - from pytriton.client import ModelClient - - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOGGING_FORMAT) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/perf_analyzer/README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/perf_analyzer/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/perf_analyzer/server.py"] - client_cmd = ["bash", "examples/perf_analyzer/client.sh"] - - with ScriptThread(server_cmd, name="server") as server_thread: - ModelClient("localhost", "BART").wait_for_model(timeout_s=args.timeout_s) - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned 
when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/test.sh deleted file mode 100644 index d3fa045b8646339995fda4433fe19e2034d34d15..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_perf_analyzer/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/test.py deleted file mode 100644 index d56bb362198f81f6cd687b8297311a13a2717636..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/test.py +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of simple_python_remote_mode example""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def verify_client_output(client_output): - expected_matches = [ - "add: [[3.0], [5.0]]", - "sub: [[-1.0], [-1.0]]", - "mul: [[2.0], [6.0]]", - "power: [[1.0], [8.0]]", - ] - for expected_match in expected_matches: - if expected_match not in client_output: - raise ValueError(f"Couldn't find expected result: {expected_match}") - LOGGER.info("Results matches expected results") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested("examples/simple_python_remote_mode/README.md", docker_image_with_name) - - install_cmd = ["bash", "examples/simple_python_remote_mode/install.sh"] - with ScriptThread(install_cmd, name="install") as install_thread: - install_thread.join() - - if install_thread.returncode != 0: - raise RuntimeError(f"Install thread returned {install_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/simple_python_remote_mode/server_starting_triton.py"] - server_remote_mul_cmd = ["python", "examples/simple_python_remote_mode/server_remote_mul.py"] - server_remote_pow_cmd = ["python", "examples/simple_python_remote_mode/server_remote_power.py"] - client_cmd = ["python", "examples/simple_python_remote_mode/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(server_remote_mul_cmd, name="server_remote_mul") as server_remote_mul_thread: - with ScriptThread(server_remote_pow_cmd, name="server_remote_pow") as server_remote_pow_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while ( - all( - [ - server_thread.is_alive(), - server_remote_mul_thread.is_alive(), - server_remote_pow_thread.is_alive(), - client_thread.is_alive(), - ] - ) - and elapsed_s < args.timeout_s - ): - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_remote_pow_thread.process: - server_remote_pow_thread.process.send_signal(signal.SIGINT) - if server_remote_mul_thread.process: - server_remote_mul_thread.process.send_signal(signal.SIGINT) - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise 
RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - if server_remote_mul_thread.returncode not in [0, -2]: - raise RuntimeError(f"Remote mul server returned {server_remote_mul_thread.returncode}") - if server_remote_pow_thread.returncode not in [0, -2]: - raise RuntimeError(f"Remote pow server returned {server_remote_pow_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/test.sh deleted file mode 100644 index 5590e9cd136b723d11306b24a2ea709c69de6aa5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_simple_python_remote_mode/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/test.py deleted file mode 100644 index caf356b1ec4aa6964f385c0a18475ca45e932e46..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/test.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of use_parameters_and_headers example""" -import argparse -import logging -import signal -import subprocess -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - search_warning_on_too_verbose_log_level, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def verify_client_output(client_output): - output1_match = "scaled_add: [[1.0], [1.0]]" in client_output - output2_match = "scaled_sub: [[2.0], [2.0]]" in client_output - if not (output1_match and output2_match): - raise ValueError("Couldn't find expected result") - else: - LOGGER.info("Results matches expected results") - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/use_parameters_and_headers/README.md", docker_image_with_name - ) - - subprocess.run(["bash", "examples/use_parameters_and_headers/install.sh"]) - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = ["python", "examples/use_parameters_and_headers/server.py"] - client_cmd = ["python", "examples/use_parameters_and_headers/client.py"] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - - LOGGER.info("Interrupting server script process") - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned 
{server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - assert not search_warning_on_too_verbose_log_level(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/test.sh deleted file mode 100644 index 5590e9cd136b723d11306b24a2ea709c69de6aa5..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_example_use_parameters_and_headers/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/test.py deleted file mode 100644 index af382d7e4b0896d2def6bc320580a59c25826195..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/test.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests errors passing e2e""" -import argparse -import logging -import os -import random -import time - -from tests.utils import ProcessMonitoring - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} - - -def main(): - import numpy as np - import pytest - - from pytriton.client import ModelClient - from pytriton.client.exceptions import PyTritonClientInferenceServerError - from pytriton.decorators import batch - from pytriton.exceptions import PyTritonUnrecoverableError - from pytriton.model_config import ModelConfig, Tensor - from pytriton.triton import Triton, TritonConfig - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument( - "--shutdown-timeout-s", - required=False, - default=300, - type=float, - help="Timeout for server to shutdown on PyTritonUnrecoverableError", - ) - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - @batch - def _throw_unrecoverable_error(**_): - raise PyTritonUnrecoverableError("Some unrecoverable error occurred thus no further inferences possible.") - - triton = Triton(config=triton_config) - triton.bind( - model_name="proxy", - infer_func=_throw_unrecoverable_error, - inputs=[Tensor(dtype=np.float32, shape=(-1,))], - outputs=[Tensor(dtype=np.float32, shape=(-1,))], - config=ModelConfig(max_batch_size=128), - ) - triton.run() - - batch_size = 6 - input1 = np.arange(batch_size * 10, batch_size * 10 + batch_size, 1).reshape(batch_size, 1).astype(np.float32) - - protocol = random.choice(["http", "grpc"]) - url = f"{protocol}://localhost:{getattr(triton_config, f'{protocol}_port')}" - with ModelClient(url, "proxy", init_timeout_s=args.init_timeout_s) as client: - with pytest.raises(PyTritonClientInferenceServerError, match="no further inferences possible"): - client.infer_batch(input1) - - # wait for shutting down of server and proxy_backend - monitoring = ProcessMonitoring(os.getpid()) - shutdown_timeout_s = args.shutdown_timeout_s - while ( - triton._triton_server.is_alive() or triton.is_alive() # noqa # pytype: disable=attribute-error - ) and shutdown_timeout_s > 0: - time.sleep(5) - shutdown_timeout_s -= 5 - monitoring.dump_state() - - assert shutdown_timeout_s > 0, "Timeout occurred - server or backend still alive" - assert not triton._triton_server.is_alive() # noqa # pytype: disable=attribute-error - 
assert not triton.is_alive() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/test.sh deleted file mode 100644 index a75a6366daeb240b4de160e4cd144344866e2069..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_fatal_error_handling/test.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --shutdown-timeout-s 60 \ - --verbose \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/test.py deleted file mode 100644 index 8fcdeb6ae7d3242b88a3f89b1ce403276f5e4a7a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/test.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""e2e tests inference on add_sub model with different i/o size which might cause shared memory reallocation""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - verbose_level = 3 if args.verbose else 0 - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig( - grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port(), log_verbose=verbose_level - ) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as client: - batch_size = 2 - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - batch_size = 128 - a_batch = np.ones((batch_size, 32), dtype=np.float32) - b_batch = np.ones((batch_size, 32), dtype=np.float32) - - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - batch_size = 1 - a_batch = np.ones((batch_size, 64), dtype=np.float32) - b_batch = np.ones((batch_size, 64), dtype=np.float32) - - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/test.sh deleted file mode 100644 index b303b22c2974f4ac63019097281cd39a217ab832..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_inference_with_variable_io_size/test.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA 
CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -pip install numpy -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/test.py deleted file mode 100644 index 3086bca15fad7e39580bbe0367a2b650ee1e89d9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/test.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests errors passing e2e""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - import pytest - - from pytriton.client import ModelClient - from pytriton.client.exceptions import PyTritonClientInferenceServerError - from pytriton.decorators import batch - from pytriton.model_config import ModelConfig, Tensor - from pytriton.triton import Triton, TritonConfig - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - @batch - def _throw_division_error(**_): - return 2 / 0 - - with Triton(config=triton_config) as triton: - triton.bind( - model_name="proxy", - infer_func=_throw_division_error, - inputs=[Tensor(dtype=np.float32, shape=(-1,))], - outputs=[Tensor(dtype=np.float32, shape=(-1,))], - config=ModelConfig(max_batch_size=128), - ) - triton.run() - - batch_size = 6 - input1 = np.arange(batch_size * 10, batch_size * 10 + batch_size, 1).reshape(batch_size, 1).astype(np.float32) - - protocol = random.choice(["http", "grpc"]) - url = f"{protocol}://localhost:{getattr(triton_config, f'{protocol}_port')}" - with ModelClient(url, "proxy", init_timeout_s=args.init_timeout_s) as client: - with pytest.raises(PyTritonClientInferenceServerError, match="division by zero"): - client.infer_batch(input1) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/test.sh deleted file mode 100644 index 2f2bbdb356f1ab318acec67e834a39374a8f4b37..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_model_error_handling/test.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --verbose \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test.py deleted file mode 100644 index 6bc50c746b58970c8a98117359a3cfa35b37e758..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test metadata""" - -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test.sh deleted file mode 100644 index f63b78c0ef995953280969fbcd00eacd0410d312..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" - -pip install pytest-timeout numpy -pytest -svvv \ - --log-cli-level=DEBUG \ - --log-cli-format='%(asctime)s [%(levelname)s] [%(process)d:%(thread)d] %(name)s:%(lineno)d: %(message)s' \ - --timeout=25 \ - ${THIS_SCRIPT_DIR}/test_pytest.py diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test_pytest.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test_pytest.py deleted file mode 100644 index 20020b08633d9ce83cf4acf26e51a18f47257060..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_network_timeouts/test_pytest.py +++ /dev/null @@ -1,374 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of network timeouts with pytest""" - -import contextlib -import logging -import socket -import time - -import numpy as np -import pytest - -from pytriton.client import AsyncioModelClient, FuturesModelClient, ModelClient -from pytriton.client.exceptions import PyTritonClientInferenceServerError, PyTritonClientTimeoutError -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -_LOGGER = logging.getLogger(__name__) - -_SMALLEST_TIMEOUT = 0.0 -_SMALL_TIMEOUT = 0.5 -_LARGE_TIMEOUT = 1.5 -_GARGANTUAN_TIMEOUT = 10.0 -_WRONG_TIMEOUT = -1.0 - - -@pytest.fixture(scope="function") -def find_free_ports(): - """Fixture to find free ports for gprc, http, and metrics""" - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as grpc: - grpc.bind(("", 0)) - grpc.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as http: - http.bind(("", 0)) - http.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as metrics: - metrics.bind(("", 0)) - metrics.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - yield { - "grpc_port": grpc.getsockname()[1], - "http_port": http.getsockname()[1], - "metrics_port": metrics.getsockname()[1], - } - - -# Define a fixture to start and stop the Triton server with the Sleeper model -@pytest.fixture(scope="function") -def triton_server(find_free_ports): - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - sleep_time = inputs["INPUT_1"].squeeze().item() - _LOGGER.info(f"Will sleep {sleep_time}s") - time.sleep(sleep_time) - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - return return_value - - class TritonInstance: - - """Context manager to hold Triton instance and ports""" - - def __init__(self, grpc_port, http_port, metrics_port, model_name, infer_function): - self.grpc_port = grpc_port - self.http_port = http_port - self.metrics_port = metrics_port 
- self.model_name = model_name - self.config = TritonConfig(http_port=http_port, grpc_port=grpc_port, metrics_port=metrics_port) - self.infer_function = infer_function - self.grpc_url = f"grpc://localhost:{self.grpc_port}" - self.http_url = f"http://localhost:{self.http_port}" - - def __enter__(self): - try: - _LOGGER.info("Checking if Triton server is already running.") - ModelClient( - self.http_url, - self.model_name, - init_timeout_s=_SMALL_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - lazy_init=False, - ) - message = "Triton server already running." - _LOGGER.error(message) - raise RuntimeError(message) - except PyTritonClientTimeoutError: - _LOGGER.debug("Triton server not running.") - pass - self.triton = Triton(config=self.config) - _LOGGER.debug(f"Binding {self.model_name} model.") - self.triton.bind( - model_name=self.model_name, - infer_func=self.infer_function, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - _LOGGER.info("Running Triton server.") - self.triton.run() - _LOGGER.debug("Waiting for Triton server to load model.") - with ModelClient( - self.http_url, - self.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - lazy_init=False, - ) as client: - _LOGGER.info(f"Triton server ready. {client.model_config}") - return self - - def __exit__(self, exc_type, exc_value, traceback): - _LOGGER.debug("Triton server stopping.") - self.triton.stop() - _LOGGER.debug("Checking if Triton server is still running.") - while True: - try: - with ModelClient( - self.http_url, - self.model_name, - init_timeout_s=_SMALL_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - lazy_init=False, - ) as client: - _LOGGER.info(f"Triton server still running. {client.model_config}") - except PyTritonClientTimeoutError: - _LOGGER.debug("Triton server not running.") - break - _LOGGER.debug(f"Triton server still alive, so sleeping for {_SMALL_TIMEOUT}s.") - time.sleep(_SMALL_TIMEOUT) - _LOGGER.info("Triton server stopped.") - - _LOGGER.debug(f"Using ports: grpc={find_free_ports}") - with TritonInstance(**find_free_ports, model_name="Sleeper", infer_function=_infer_fn) as triton: - yield triton - - -# Define a fixture to create and return a client object with a very small timeout value -@pytest.fixture(scope="function") -def http_client(triton_server): - _LOGGER.debug( - f"Preparing client for {triton_server.http_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - yield ModelClient( - url=triton_server.http_url, - model_name=triton_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -# Define a fixture to create and return a client object with a very small timeout value -@pytest.fixture(scope="function") -def grpc_client(triton_server): - _LOGGER.debug( - f"Preparing client for {triton_server.grpc_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." 
- ) - yield ModelClient( - url=triton_server.grpc_url, - model_name=triton_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -# Define a fixture to create and return a client object with a very small timeout value -@pytest.fixture(scope="function") -async def async_http_client(triton_server): - _LOGGER.debug( - f"Preparing client for {triton_server.http_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - return AsyncioModelClient( - url=triton_server.http_url, - model_name=triton_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -# Define a fixture to create and return a client object with a very small timeout value -@pytest.fixture(scope="function") -async def async_grpc_client(triton_server): - _LOGGER.debug( - f"Preparing client for {triton_server.grpc_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - return AsyncioModelClient( - url=triton_server.grpc_url, - model_name=triton_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -# Define a fixture to create and return a client object with a very small timeout value -@pytest.fixture(scope="function") -def futures_http_client(triton_server): - _LOGGER.debug( - f"Preparing client for {triton_server.http_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - yield FuturesModelClient( - url=triton_server.http_url, - model_name=triton_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -# Define a fixture to create and return a client object with a very small timeout value -@pytest.fixture(scope="function") -def futures_grpc_client(triton_server): - _LOGGER.debug( - f"Preparing client for {triton_server.grpc_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." 
- ) - yield FuturesModelClient( - url=triton_server.grpc_url, - model_name=triton_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -# Define a fixture to create and return an input array with a value of 1.5 seconds -@pytest.fixture(scope="session") -def input_sleep_large(): - _LOGGER.debug(f"Preparing input array with value {_LARGE_TIMEOUT}.") - yield np.array([[_LARGE_TIMEOUT]], dtype=np.float64) - - -# Define a fixture to create and return an input array with a value of -1 seconds -@pytest.fixture(scope="session") -def input_sleep_wrong(): - _LOGGER.debug(f"Preparing input array with value {_LARGE_TIMEOUT}.") - yield np.array([[_WRONG_TIMEOUT]], dtype=np.float64) - - -# Define a fixture to create and return an input array with a value of 1.5 seconds -@pytest.fixture(scope="session") -def input_sleep_smallest(): - _LOGGER.debug(f"Preparing input array with value {_LARGE_TIMEOUT}.") - yield np.array([[_SMALLEST_TIMEOUT]], dtype=np.float64) - - -def test_infer_sample_network_timeout_grpc(triton_server, grpc_client, input_sleep_large): - _LOGGER.debug(f"Testing grpc_client with input {input_sleep_large}.") - with pytest.raises(PyTritonClientTimeoutError): - with grpc_client as client: - client.infer_sample(input_sleep_large) - - -def test_infer_sample_network_timeout_http(triton_server, http_client, input_sleep_large): - _LOGGER.debug(f"Testing http_client with input {input_sleep_large}.") - with pytest.raises(PyTritonClientTimeoutError): - with http_client as client: - client.infer_sample(input_sleep_large) - - -def test_infer_sample_model_failure_grpc(triton_server, grpc_client, input_sleep_wrong): - _LOGGER.debug(f"Testing grpc_client with input {input_sleep_wrong}.") - with pytest.raises(PyTritonClientInferenceServerError): - with grpc_client as client: - client.infer_sample(input_sleep_wrong) - - -def test_infer_sample_model_failure_http(triton_server, http_client, input_sleep_wrong): - _LOGGER.debug(f"Testing http_client with input {input_sleep_wrong}.") - with pytest.raises(PyTritonClientInferenceServerError): - with http_client as client: - client.infer_sample(input_sleep_wrong) - - -def test_infer_sample_success_grpc(triton_server, grpc_client, input_sleep_smallest): - _LOGGER.debug(f"Testing grpc_client with input {input_sleep_smallest}.") - with grpc_client as client: - result = client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - -def test_infer_sample_success_http(triton_server, http_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input {input_sleep_smallest}.") - with http_client as client: - result = client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - -async def test_infer_sample_network_timeout_async_grpc(triton_server, async_grpc_client, input_sleep_large): - _LOGGER.debug(f"Testing grpc_client with input {input_sleep_large}.") - with pytest.raises(PyTritonClientTimeoutError): - async with async_grpc_client as client: - await client.infer_sample(input_sleep_large) - - -async def test_infer_sample_network_timeout_async_http(triton_server, async_http_client, input_sleep_large): - _LOGGER.debug(f"Testing http_client with input {input_sleep_large}.") - with pytest.raises(PyTritonClientTimeoutError): - async with async_http_client as client: - await client.infer_sample(input_sleep_large) - - -async def test_infer_sample_model_failure_async_grpc(triton_server, async_grpc_client, input_sleep_wrong): - 
_LOGGER.debug(f"Testing grpc_client with input {input_sleep_wrong}.") - with pytest.raises(PyTritonClientInferenceServerError): - async with async_grpc_client as client: - await client.infer_sample(input_sleep_wrong) - - -async def test_infer_sample_model_failure_async_http(triton_server, async_http_client, input_sleep_wrong): - _LOGGER.debug(f"Testing http_client with input {input_sleep_wrong}.") - with pytest.raises(PyTritonClientInferenceServerError): - async with async_http_client as client: - await client.infer_sample(input_sleep_wrong) - - -async def test_infer_sample_success_async_http(triton_server, async_http_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input {input_sleep_smallest}.") - async with async_http_client as client: - result = await client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - -async def test_infer_sample_success_async_grpc(triton_server, async_grpc_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input {input_sleep_smallest}.") - async with async_grpc_client as client: - result = await client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - -def test_infer_sample_network_timeout_grpc_futures(triton_server, futures_grpc_client, input_sleep_large): - _LOGGER.debug(f"Testing futures_grpc_client with input {input_sleep_large}.") - with futures_grpc_client as client: - future = client.infer_sample(input_sleep_large) - with pytest.raises(PyTritonClientTimeoutError): - future.result() - - -def test_infer_sample_network_timeout_http_futures(triton_server, futures_http_client, input_sleep_large): - _LOGGER.debug(f"Testing futures_http_client with input {input_sleep_large}.") - with futures_http_client as client: - future = client.infer_sample(input_sleep_large) - with pytest.raises(PyTritonClientTimeoutError): - future.result() - - -def test_infer_sample_model_failure_grpc_futures(triton_server, futures_grpc_client, input_sleep_wrong): - _LOGGER.debug(f"Testing futures_grpc_client with input {input_sleep_wrong}.") - with futures_grpc_client as client: - future = client.infer_sample(input_sleep_wrong) - with pytest.raises(PyTritonClientInferenceServerError): - future.result() - - -def test_infer_sample_model_failure_http_futures(triton_server, futures_http_client, input_sleep_wrong): - _LOGGER.debug(f"Testing futures_http_client with input {input_sleep_wrong}.") - with futures_http_client as client: - future = client.infer_sample(input_sleep_wrong) - with pytest.raises(PyTritonClientInferenceServerError): - future.result() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test.py deleted file mode 100644 index ac29b9e7cfea3848a58a5aec9c2ad75cd54b5134..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test metadata""" - -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", -} diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test.sh deleted file mode 100644 index d7ea953a0e8619b9789ee2e5cac4df8c12b35097..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" - -pip install pytest-timeout numpy -pytest -svvv \ - --log-cli-level=DEBUG \ - --log-cli-format='%(asctime)s [%(levelname)s] [%(process)d:%(thread)d] %(name)s:%(lineno)d: %(message)s' \ - --timeout=60 \ - ${THIS_SCRIPT_DIR}/test_pytest.py \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test_pytest.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test_pytest.py deleted file mode 100644 index aaf4dfd825faf50a18888c20cb899d5800385acf..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_remote_life_cycle/test_pytest.py +++ /dev/null @@ -1,534 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of network timeouts with pytest""" - -import contextlib -import logging -import socket -import time - -import numpy as np -import pytest - -from pytriton.client import FuturesModelClient, ModelClient -from pytriton.client.exceptions import PyTritonClientInferenceServerError, PyTritonClientTimeoutError -from pytriton.client.utils import create_client_from_url -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import RemoteTriton, Triton, TritonConfig - -_LOGGER = logging.getLogger(__name__) - -_SMALLEST_TIMEOUT = 0.0 -_SMALL_TIMEOUT = 0.5 -_GARGANTUAN_TIMEOUT = 10.0 - - -@pytest.fixture(scope="function") -def find_free_ports(): - """Fixture to find free ports for gprc, http, and metrics""" - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as grpc: - grpc.bind(("", 0)) - grpc.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as http: - http.bind(("", 0)) - http.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as metrics: - metrics.bind(("", 0)) - metrics.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - yield { - "grpc_port": grpc.getsockname()[1], - "http_port": http.getsockname()[1], - "metrics_port": metrics.getsockname()[1], - } - - -# Define a fixture to start and stop the Triton server with the Sleeper model -@pytest.fixture(scope="function") -def triton_server(find_free_ports): - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - return return_value - - class TritonInstance: - - """Context manager to hold Triton instance and ports""" - - def __init__(self, grpc_port, http_port, metrics_port, model_name, infer_function): - self.grpc_port = grpc_port - self.http_port = http_port - self.metrics_port = metrics_port - self.model_name = model_name - self.config = TritonConfig(http_port=http_port, grpc_port=grpc_port, metrics_port=metrics_port) - self.infer_function = infer_function - self.grpc_url = f"grpc://localhost:{self.grpc_port}" - self.http_url = f"http://localhost:{self.http_port}" - - def __enter__(self): - try: - _LOGGER.info("Checking if Triton server is already running.") - ModelClient( - self.http_url, - self.model_name, - init_timeout_s=_SMALL_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - lazy_init=False, - ) - message = "Triton server already running." 
- _LOGGER.error(message) - raise RuntimeError(message) - except PyTritonClientTimeoutError: - _LOGGER.debug("Triton server not running.") - pass - self.triton = Triton(config=self.config) - _LOGGER.debug(f"Binding {self.model_name} model.") - self.triton.bind( - model_name=self.model_name, - infer_func=self.infer_function, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - _LOGGER.info("Running Triton server.") - self.triton.run() - _LOGGER.debug("Waiting for Triton server to load model.") - with ModelClient( - self.http_url, - self.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - lazy_init=False, - ) as client: - _LOGGER.info(f"Triton server ready. {client.model_config}") - return self - - def __exit__(self, exc_type, exc_value, traceback): - _LOGGER.debug("Triton server stopping.") - self.triton.stop() - _LOGGER.debug("Checking if Triton server is still running.") - while True: - try: - with ModelClient( - self.http_url, - self.model_name, - init_timeout_s=_SMALL_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - lazy_init=False, - ) as client: - _LOGGER.info(f"Triton server still running. {client.model_config}") - except PyTritonClientTimeoutError: - _LOGGER.debug("Triton server not running.") - break - _LOGGER.debug(f"Triton server still alive, so sleeping for {_SMALL_TIMEOUT}s.") - time.sleep(_SMALL_TIMEOUT) - _LOGGER.info("Triton server stopped.") - - _LOGGER.debug(f"Using ports: grpc={find_free_ports}") - with TritonInstance(**find_free_ports, model_name="LocalIdentity", infer_function=_infer_fn) as triton: - yield triton - - -# Define a fixture to create and return a client object with a very small timeout value -@pytest.fixture(scope="function") -def http_client(triton_server): - _LOGGER.debug( - f"Preparing client for {triton_server.http_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." 
- ) - yield ModelClient( - url=triton_server.http_url, - model_name=triton_server.model_name, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_SMALL_TIMEOUT, - ) - - -# Define a fixture to create and return an input array with a value of 1.5 seconds -@pytest.fixture(scope="session") -def input_sleep_smallest(): - _LOGGER.debug(f"Preparing input array with value {[_SMALLEST_TIMEOUT]}.") - yield np.array([[_SMALLEST_TIMEOUT]], dtype=np.float64) - - -def test_remote_triton_not_connected(): - _LOGGER.debug("Testing RemoteTriton not connected after instantiation.") - t = RemoteTriton(url="localhost:8000") - assert not t.is_connected() - - -def test_remote_triton_connect_with_context(): - _LOGGER.debug("Testing RemoteTriton connect with context manager.") - with Triton() as t: - assert t.is_connected() - assert t.is_alive() - t.bind("m1", lambda: None, [], []) - assert t.is_alive() - with RemoteTriton(url="localhost:8000") as rt: - assert rt.is_connected() - rt.bind("m2", lambda: None, [], []) - assert rt.is_alive() - - -def test_remote_triton_connect(): - _LOGGER.debug("Testing RemoteTriton connect with connect method.") - with Triton() as t: - assert t.is_alive() - assert t.is_connected() - - rt2 = RemoteTriton(url="localhost:8000") - assert not rt2.is_connected() - assert rt2.is_alive() - rt2.bind("m2", lambda: None, [], []) - assert not rt2.is_connected() - assert not rt2.is_alive() - rt2.connect() - assert rt2.is_connected() - assert rt2.is_alive() - - with ModelClient("localhost:8000", "m2", lazy_init=False) as _: - pass - - rt2.stop() - - with create_client_from_url("localhost:8000") as tr_client: - assert not tr_client.is_model_ready("m2") - - -def test_bind_multiple_models(): - _LOGGER.debug("Testing Triton bind multiple models.") - t = Triton() - assert not t.is_connected() - assert not t.is_alive() - t.bind("m1", lambda: None, [], []) - assert not t.is_alive() - t.run() - assert t.is_alive() - with ModelClient("localhost:8000", "m1", lazy_init=False) as _: - pass - - t.bind("m2", lambda: None, [], []) - assert t.is_alive() - with ModelClient("localhost:8000", "m2", lazy_init=False) as _: - pass - - t.stop() - assert not t.is_alive() - assert not t.is_connected() - - -def test_local_and_remote_models_context_manager(triton_server, http_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input {input_sleep_smallest}.") - with http_client as local_client: - result = local_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - return return_value - - remote_model = "RemoteIdentity" - - with RemoteTriton(url=triton_server.http_url) as triton: - triton.bind( - model_name=remote_model, - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - with ModelClient( - url=triton_server.http_url, - model_name=remote_model, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - lazy_init=False, - ) as remote_client: - result = remote_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - -def test_local_and_remote_models_explicite_run(triton_server, http_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input 
{input_sleep_smallest}.") - with http_client as local_client: - result = local_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - return return_value - - remote_model = "RemoteIdentity" - - triton = RemoteTriton(url=triton_server.http_url) - triton.connect() - triton.bind( - model_name=remote_model, - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - with ModelClient( - url=triton_server.http_url, - model_name=remote_model, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - lazy_init=False, - ) as remote_client: - result = remote_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - triton.stop() - - -def test_local_and_remote_models_survive_remote_close(triton_server, http_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input {input_sleep_smallest}.") - with http_client as local_client: - result = local_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - return return_value - - remote_model = "RemoteIdentity" - - with RemoteTriton(url=triton_server.http_url) as triton: - triton.bind( - model_name=remote_model, - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - with ModelClient( - url=triton_server.http_url, - model_name=remote_model, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - lazy_init=False, - ) as remote_client: - result = remote_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - result = local_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - with create_client_from_url(triton_server.http_url) as tr_client: - assert not tr_client.is_model_ready(remote_model) - - -def test_local_and_remote_models_closes_client(triton_server, http_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input {input_sleep_smallest}.") - remote_client = None - with http_client as local_client: - result = local_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - return return_value - - remote_model = "RemoteIdentity" - - with RemoteTriton(url=triton_server.http_url) as triton: - triton.bind( - model_name=remote_model, - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - remote_client = ModelClient( - url=triton_server.http_url, - model_name=remote_model, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - lazy_init=False, - ) - 
result = remote_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - with create_client_from_url(triton_server.http_url) as tr_client: - assert not tr_client.is_model_ready(remote_model) - - with pytest.raises(PyTritonClientInferenceServerError): - remote_client.infer_sample(input_sleep_smallest) - - with pytest.raises(PyTritonClientTimeoutError): - remote_client_for_dead_model = ModelClient( - url=triton_server.http_url, - model_name=remote_model, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - lazy_init=False, - ) - - with ModelClient( - url=triton_server.http_url, - model_name=remote_model, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - ) as remote_client_for_dead_model: - with pytest.raises(PyTritonClientTimeoutError): - remote_client_for_dead_model.infer_sample(input_sleep_smallest) - - -def test_local_and_remote_models_inflight_requests(triton_server, http_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input {input_sleep_smallest}.") - with http_client as local_client: - result = local_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - sleep_time = 5.0 - _LOGGER.info(f"Will sleep {sleep_time}s") - time.sleep(sleep_time) - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - return return_value - - remote_model = "RemoteIdentity" - - futures_client = None - result_future = None - - with RemoteTriton(url=triton_server.http_url) as triton: - triton.bind( - model_name=remote_model, - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - futures_client = FuturesModelClient( - url=triton_server.http_url, - model_name=remote_model, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - ) - result_future = futures_client.infer_sample(input_sleep_smallest) - - result = local_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - - # model waits until all requests are handled - result = result_future.result() - assert result["OUTPUT_1"] == input_sleep_smallest - - result = local_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - with create_client_from_url(triton_server.http_url) as tr_client: - assert not tr_client.is_model_ready(remote_model) - - if futures_client: - futures_client.close() - - -def test_local_and_remote_models_name_clash(triton_server, http_client, input_sleep_smallest): - _LOGGER.debug(f"Testing http_client with input {input_sleep_smallest}.") - i_was_called = False - - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - nonlocal i_was_called - i_was_called = True - return return_value - - remote_model = triton_server.model_name - - with RemoteTriton(url=triton_server.http_url) as triton: - triton.bind( - model_name=remote_model, - infer_func=_infer_fn, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - with ModelClient( - url=triton_server.http_url, - 
model_name=remote_model, - init_timeout_s=_GARGANTUAN_TIMEOUT, - inference_timeout_s=_GARGANTUAN_TIMEOUT, - lazy_init=False, - ) as remote_client: - result = remote_client.infer_sample(input_sleep_smallest) - assert result["OUTPUT_1"] == input_sleep_smallest - assert i_was_called - - with create_client_from_url(triton_server.http_url) as tr_client: - assert not tr_client.is_model_ready(remote_model) - - with http_client as local_client: - with pytest.raises(PyTritonClientTimeoutError): - local_client.infer_sample(input_sleep_smallest) diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/server.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/server.py deleted file mode 100644 index b5e041eefd0001e9d02f2ed6950e83226403ab52..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/server.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Run triton""" -import argparse -import logging -import random - -from pytriton.triton import Triton, TritonConfig -from tests.functional.common.models import ADD_SUB_PYTHON_MODEL -from tests.utils import find_free_port - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) - - -def main(): - from tests.utils import DEFAULT_LOG_FORMAT - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - parser.add_argument("--grpc-port", type=int, help="Grpc triton port") - parser.add_argument("--http-port", type=int, help="Http triton port") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=args.grpc_port, http_port=args.http_port, metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/test.py deleted file mode 100644 index 5ebec84b6a717297ddab13697afe1a5d29aacd4b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/test.py +++ /dev/null @@ -1,150 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests scenario for each of signals [SIGINT, SIGTERM]: -- starting server -- wait for server to be ready -- checks if resources are obtained - - shared memory -- sends signal to server script -- wait server shutdown (with timeout) -- checks if resources are released -""" -import argparse -import logging -import pathlib -import signal -import sys -import time - -from tests.utils import ProcessMonitoring # pytype: disable=import-error -from tests.utils import DEFAULT_LOG_FORMAT, ScriptThread, find_free_port - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) - -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def _run_infer(url, init_timeout_s, batch_size): - import numpy as np - - from pytriton.client import ModelClient - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - - model_spec = ADD_SUB_PYTHON_MODEL - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - with ModelClient(url, model_spec.name, init_timeout_s=init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -def _check_resources_allocated(initial_shared_memory_files): - shared_memory_files = sorted(set(pathlib.Path("/dev/shm").rglob("*")) - set(initial_shared_memory_files)) - assert shared_memory_files, shared_memory_files - - -def _check_resources_released(initial_shared_memory_files): - shared_memory_files = sorted(set(pathlib.Path("/dev/shm").rglob("*")) - set(initial_shared_memory_files)) - assert not shared_memory_files, shared_memory_files - - -def _run_test(init_timeout_s, verbose, seed, signal_value, test_timeout_s): - import psutil - - start_time_s = time.time() - wait_time_s = min(test_timeout_s, 5) - - server_script_module = f"{__package__}.server" - - http_port = find_free_port() - grpc_port = find_free_port() - server_cmd = [ - "python", - "-m", - server_script_module, - "--http-port", - str(http_port), - "--grpc-port", - str(grpc_port), - "--seed", - str(seed), - ] - if verbose: - server_cmd.append("--verbose") - - initial_shared_memory_files = list(pathlib.Path("/dev/shm").rglob("*")) - with ScriptThread(server_cmd, name="server") as server_thread: - url = f"http://localhost:{http_port}" - _run_infer(url, init_timeout_s, batch_size=32) - monitoring = ProcessMonitoring(server_thread.process.pid) - elapsed_s = time.time() - start_time_s - - children_processes = server_thread.process.children(recursive=True) - LOGGER.info(f"Found children processes: {children_processes}") - - _check_resources_allocated(initial_shared_memory_files) - LOGGER.info(f"Sending {signal_value} to server script process ({server_thread.process})") - server_thread.process.send_signal(signal_value) - - LOGGER.info("Waiting for server script and all its children processes to finish") - - def _process_running_and_not_zombie(_process): - return _process.is_running() and _process.status() != psutil.STATUS_ZOMBIE - - while ( - server_thread.is_alive() or any(_process_running_and_not_zombie(child) for child in children_processes) - ) and elapsed_s <= test_timeout_s: - time.sleep(wait_time_s) - elapsed_s = time.time() - start_time_s - monitoring.dump_state() - - timeout = elapsed_s >= test_timeout_s and ( - server_thread.is_alive() or any(child.is_running() for child in children_processes) - ) - - if timeout: - 
LOGGER.error(f"Timeout occurred (timeout_s={test_timeout_s})") - sys.exit(-2) - else: - LOGGER.info("All processed terminated") - - _check_resources_released(initial_shared_memory_files) - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--test-timeout-s", required=False, default=300, type=float, help="Timeout for each subtest performance" - ) - parser.add_argument("--init-timeout-s", required=False, default=300, type=float, help="Timeout for test") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Provide verbose logs") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - _run_test(args.init_timeout_s, args.verbose, args.seed, signal.SIGINT, args.test_timeout_s) - _run_test(args.init_timeout_s, args.verbose, args.seed, signal.SIGTERM, args.test_timeout_s) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/test.sh deleted file mode 100644 index 872b8b6c8483c010846378e4647dd7ce696d9cf6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_resources_released_after_signals/test.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -pip install numpy -python -m"${TEST_MODULE}" \ - --test-timeout-s 300 \ - --init-timeout-s 300 \ - --seed 2022112315 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/test.py deleted file mode 100644 index 0816cb37dda8f7e270e781afdc9aa53c657ea9c6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/test.py +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests errors passing e2e""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.decorators import batch - from pytriton.model_config import ModelConfig, Tensor - from pytriton.triton import Triton, TritonConfig - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument( - "--shutdown-timeout-s", - required=False, - default=300, - type=float, - help="Timeout for server to shutdown on PyTritonUnrecoverableError", - ) - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - class _InferFuncWrapper: - def __init__(self): - self.call_count = 0 - - @batch - def infer_func(self, **inputs): - a_batch, b_batch = inputs.values() - add_batch = a_batch + b_batch - sub_batch = a_batch - b_batch - self.call_count += 1 - return {"add": add_batch, "sub": sub_batch} - - random.seed(args.seed) - infer_func_wrapper = _InferFuncWrapper() - - triton_config = TritonConfig( - grpc_port=find_free_port(), - http_port=find_free_port(), - metrics_port=find_free_port(), - cache_config=[f"local,size={1024 * 1024}"], # 1 MB - ) - LOGGER.debug(f"Using {triton_config}") - with Triton(config=triton_config) as triton: - triton.bind( - model_name="AddSub", - infer_func=infer_func_wrapper.infer_func, - inputs=( - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ), - outputs=( - Tensor(name="add", dtype=np.float32, shape=(-1,)), - Tensor(name="sub", dtype=np.float32, shape=(-1,)), - ), - config=ModelConfig(max_batch_size=16, response_cache=True), - ) - triton.run() - - batch_size = 16 - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", 
"grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, "AddSub", init_timeout_s=args.init_timeout_s) as client: - for idx in range(10): - LOGGER.info(f"Sending request {idx + 1}") - result_batch = client.infer_batch(a_batch, b_batch) - LOGGER.info(f"Response obtained for {idx + 1}. Number of outputs: {len(result_batch)}") - - LOGGER.info("Validating response.") - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - LOGGER.info(f"Infer function requests count: {infer_func_wrapper.call_count}") - - assert infer_func_wrapper.call_count == 1 - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/test.sh deleted file mode 100644 index a75a6366daeb240b4de160e4cd144344866e2069..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_response_cache/test.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --shutdown-timeout-s 60 \ - --verbose \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/server.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/server.py deleted file mode 100644 index 64837287a1261ba00a833009c48fff508df956ed..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/server.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. 
All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""e2e run triton in not main thread""" -import argparse -import logging -import random -import signal -import threading -import traceback -from typing import Any - -from pytriton.triton import Triton, TritonConfig -from tests.functional.common.models import ADD_SUB_PYTHON_MODEL -from tests.utils import find_free_port - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) - -signal_handled = False -cond = threading.Condition() - - -class MyTritonThread(threading.Thread): - def __init__(self, args): - super().__init__(daemon=True) - self.triton_config = None - self.exception_traceback = None - self.triton = None - self.args = args - - def run(self) -> None: - try: - assert self.args is not None - assert self.args.grpc_port is not None - assert self.args.http_port is not None - - self.triton_config = TritonConfig( - grpc_port=self.args.grpc_port, http_port=self.args.http_port, metrics_port=find_free_port() - ) - LOGGER.debug(f"Using {self.triton_config}") - self.triton = Triton(config=self.triton_config) - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - self.triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - self.triton.serve() - - except Exception: - self.exception_traceback = traceback.format_exc() - with cond: - cond.notify() - - -def signal_handler(_signal_num: Any, _) -> None: - with cond: - global signal_handled - signal_handled = True - cond.notify() - - -def main(): - from tests.utils import DEFAULT_LOG_FORMAT - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - parser.add_argument("--grpc-port", type=int, help="Grpc triton port") - parser.add_argument("--http-port", type=int, help="Http triton port") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - signal.signal(signal.SIGINT, signal_handler) - signal.signal(signal.SIGTERM, signal_handler) - - th = MyTritonThread(args) - th.start() - - with cond: - cond.wait() - - assert signal_handled - assert th.triton is not None - th.triton.stop() - LOGGER.info("Signal handled and triton server properly stopped") - - assert th.exception_traceback is None, f"Raised {th.exception_traceback}" - - -if __name__ == "__main__": - main() diff --git 
a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/test.py deleted file mode 100644 index 8907ad52391ea3ecf957374b44a193e9f6a5de6f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/test.py +++ /dev/null @@ -1,135 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests which: -- starting server -- wait for server to be ready -- sends SEGFAULT signal to server script -- waits till all children processes of server script finishes -""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ProcessMonitoring # pytype: disable=import-error -from tests.utils import DEFAULT_LOG_FORMAT, ScriptThread, find_free_port - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) - -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def verify_client_output(client_output): - output1_match = "Signal handled and triton server properly stopped" in client_output - if not output1_match: - raise ValueError("Couldn't find expected result") - else: - LOGGER.info("Results matches expected results") - - -def run_infer(batch_size, init_timeout_s, http_port): - import numpy as np - - from pytriton.client import ModelClient - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - - model_spec = ADD_SUB_PYTHON_MODEL - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = "http" - url = f"{protocol}://localhost:{http_port}" - with ModelClient(url, model_spec.name, init_timeout_s=init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -def main(): - import psutil - - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - parser.add_argument("--init-timeout-s", required=False, default=300, type=float, help="Timeout for test") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Provide verbose logs") - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - start_time_s = time.time() - wait_time_s = min(args.timeout_s, 5) - - server_script_module = f"{__package__}.server" - - http_port = find_free_port() - grpc_port = find_free_port() - server_cmd = [ - 
"python", - "-m", - server_script_module, - "--http-port", - str(http_port), - "--grpc-port", - str(grpc_port), - "--seed", - str(args.seed), - ] - if args.verbose: - server_cmd.append("--verbose") - - with ScriptThread(server_cmd, name="server") as server_thread: - run_infer(args.batch_size, args.init_timeout_s, http_port) - elapsed_s = time.time() - start_time_s - - monitoring = ProcessMonitoring(server_thread.process.pid) - - children_processes = server_thread.process.children(recursive=True) - LOGGER.info(f"Found children processes: {children_processes}") - LOGGER.info(f"Sending SEGINT to server script process ({server_thread.process})") - server_thread.process.send_signal(signal.SIGINT) - - def _process_running_and_not_zombie(_process): - return _process.is_running() and _process.status() != psutil.STATUS_ZOMBIE - - while ( - server_thread.is_alive() or any(_process_running_and_not_zombie(child) for child in children_processes) - ) and elapsed_s <= args.timeout_s: - time.sleep(wait_time_s) - elapsed_s = time.time() - start_time_s - monitoring.dump_state() - - timeout = elapsed_s >= args.timeout_s and ( - server_thread.is_alive() or any(child.is_running() for child in children_processes) - ) - - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - else: - LOGGER.info("All processed terminated") - - verify_client_output(server_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/test.sh deleted file mode 100644 index c0c7157f2fe83153df0a0e67aef4d35457ac3558..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_in_thread/test.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -pip install numpy -python -m"${TEST_MODULE}" \ - --timeout-s 300 \ - --init-timeout-s 300 \ - --seed 2022101915 \ - --batch-size 32 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/test.py deleted file mode 100644 index e3640271c844e8be6cb4c922133fdcca8aa5e801..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/test.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""e2e tests inference on add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "ubuntu:22.04", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - batch_size = args.batch_size - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as 
client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/test.sh deleted file mode 100644 index 563df376d63dd4c827001edc1e3925b362a79907..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10/test.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -apt update -y -apt install -y software-properties-common - -add-apt-repository ppa:deadsnakes/ppa -y - -apt install -y python3.10 python3.10-dev libpython3.10 python3.10-distutils python3.10-venv python3-pip python-is-python3 \ - build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev \ - libffi-dev curl libbz2-dev pkg-config make - -python3.10 -m venv /opt/venv -source /opt/venv/bin/activate - -if [[ -d "${PYTRITON_DIST_DIR}" ]];then - export WHEEL_PATH=$(ls ${PYTRITON_DIST_DIR}/*pytriton*.whl) - pip install "${WHEEL_PATH}[dev]" -else - pip install nvidia-pytriton -fi - -python3 -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --batch-size 32 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
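Every test module above picks its Triton HTTP, gRPC and metrics ports with tests.utils.find_free_port. That helper is not part of this diff, so the sketch below only shows the conventional way such a helper is implemented: bind a socket to port 0 and read back the port the kernel assigned. Treat it as an assumption about the helper, not its actual source.

```python
# Hedged sketch: tests/utils.py is not shown in this diff. A find_free_port
# helper like the one used throughout these tests is conventionally written by
# binding to port 0 and reading back the OS-assigned port.
import socket


def find_free_port() -> int:
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind(("", 0))  # port 0 lets the kernel choose a free port
        return sock.getsockname()[1]


# Example: distinct ports for Triton's HTTP, gRPC and metrics endpoints.
http_port, grpc_port, metrics_port = (find_free_port() for _ in range(3))
```

Note the inherent race: the port is released as soon as the socket closes and could in principle be reused by another process before the server binds it, which is acceptable for test fixtures but not for production port allocation.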
diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/test.py deleted file mode 100644 index e3640271c844e8be6cb4c922133fdcca8aa5e801..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/test.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""e2e tests inference on add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "ubuntu:22.04", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - batch_size = args.batch_size - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/test.sh 
b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/test.sh deleted file mode 100644 index 2dd02859af8b29c84d530988bf00ead7041d66ba..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-10_pyenv/test.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -apt update -# need git and build dependencies https://github.com/pyenv/pyenv/wiki#suggested-build-environment -apt install -y python3 python3-distutils python-is-python3 git \ - build-essential libssl-dev zlib1g-dev \ - libbz2-dev libreadline-dev libsqlite3-dev curl \ - libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev - -curl https://pyenv.run | bash -export PYENV_ROOT="$HOME/.pyenv" -command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH" -eval "$(pyenv init -)" -eval "$(pyenv virtualenv-init -)" - -env PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install 3.10 -pyenv virtualenv 3.10 venv -pyenv activate venv - -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pyenv virtualenv-prefix)/lib - -if [[ -d "${PYTRITON_DIST_DIR}" ]];then - export WHEEL_PATH=$(ls ${PYTRITON_DIST_DIR}/*pytriton*.whl) - pip install "${WHEEL_PATH}[dev]" -else - pip install nvidia-pytriton -fi - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --batch-size 32 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
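The L0_run_on_python_3-* directories removed here all run the same add_sub smoke test; only test.sh differs, provisioning the interpreter (3.8 through 3.11) either from the deadsnakes PPA plus venv or by building it with pyenv. The shared ADD_SUB_PYTHON_MODEL comes from tests/functional/common/models.py, which is not part of this diff; based on the inline _InferFuncWrapper in the response-cache test above, its infer function presumably has roughly this shape.

```python
# Hedged sketch of the add_sub infer function the version-matrix tests exercise.
# common/models.py is not shown in this diff; the shape below is inferred from
# the inline _InferFuncWrapper used by the L0_response_cache test.
import numpy as np

from pytriton.decorators import batch


@batch
def add_sub_infer_fn(**inputs):
    a_batch, b_batch = inputs.values()
    return {"add": a_batch + b_batch, "sub": a_batch - b_batch}
```

Each variant then binds this function, starts Triton on freshly allocated ports, sends all-ones batches over a randomly chosen protocol, and asserts that the "add" and "sub" outputs equal 2 and 0 respectively.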
diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/test.py deleted file mode 100644 index e3640271c844e8be6cb4c922133fdcca8aa5e801..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/test.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""e2e tests inference on add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "ubuntu:22.04", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - batch_size = args.batch_size - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/test.sh 
deleted file mode 100644 index 348a017758c979d0238c61b2bba366805888f921..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11/test.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -apt update -y -apt install -y software-properties-common - -add-apt-repository ppa:deadsnakes/ppa -y - -apt install -y python3.11 python3.11-dev python3.11-dev libpython3.11 python3.11-distutils python3.11-venv python3-pip python-is-python3 \ - build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev \ - libffi-dev curl libbz2-dev pkg-config make - -python3.11 -m venv /opt/venv -source /opt/venv/bin/activate - -if [[ -d "${PYTRITON_DIST_DIR}" ]];then - export WHEEL_PATH=$(ls ${PYTRITON_DIST_DIR}/*pytriton*.whl) - pip install "${WHEEL_PATH}[dev]" -else - pip install nvidia-pytriton -fi - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --batch-size 32 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/test.py deleted file mode 100644 index e3640271c844e8be6cb4c922133fdcca8aa5e801..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/test.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""e2e tests inference on add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "ubuntu:22.04", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - batch_size = args.batch_size - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/test.sh deleted file mode 100644 index 4fb996b138d3cd3d0927aec581bd0495c62e0489..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-11_pyenv/test.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -apt update -# need git and build dependencies https://github.com/pyenv/pyenv/wiki#suggested-build-environment -apt install -y python3 python3-distutils python-is-python3 git \ - build-essential libssl-dev zlib1g-dev \ - libbz2-dev libreadline-dev libsqlite3-dev curl \ - libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev - -curl https://pyenv.run | bash -export PYENV_ROOT="$HOME/.pyenv" -command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH" -eval "$(pyenv init -)" -eval "$(pyenv virtualenv-init -)" - -env PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install 3.11 -pyenv virtualenv 3.11 venv -pyenv activate venv - -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pyenv virtualenv-prefix)/lib - -if [[ -d "${PYTRITON_DIST_DIR}" ]];then - export WHEEL_PATH=$(ls ${PYTRITON_DIST_DIR}/*pytriton*.whl) - pip install "${WHEEL_PATH}[dev]" -else - pip install nvidia-pytriton -fi - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --batch-size 32 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/test.py deleted file mode 100644 index e3640271c844e8be6cb4c922133fdcca8aa5e801..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/test.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""e2e tests inference on add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "ubuntu:22.04", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - batch_size = args.batch_size - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/test.sh deleted file mode 100644 index 27c4a90a80688d456740952cde0e0c31db8a280c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8/test.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -apt update -y -apt install -y software-properties-common - -add-apt-repository ppa:deadsnakes/ppa -y - -apt install -y python3.8 python3.8-dev libpython3.8 python3.8-distutils python3.8-venv python3-pip python-is-python3 \ - build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev \ - libffi-dev curl libbz2-dev pkg-config make - -python3.8 -m venv /opt/venv -source /opt/venv/bin/activate - -if [[ -d "${PYTRITON_DIST_DIR}" ]];then - export WHEEL_PATH=$(ls ${PYTRITON_DIST_DIR}/*pytriton*.whl) - pip install "${WHEEL_PATH}[dev]" -else - pip install nvidia-pytriton -fi - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --batch-size 32 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/test.py deleted file mode 100644 index e3640271c844e8be6cb4c922133fdcca8aa5e801..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/test.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""e2e tests inference on add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "ubuntu:22.04", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - batch_size = args.batch_size - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/test.sh deleted file mode 100644 index b60599a4f1399f968a2c4acad0ad4ca58ca6d37c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-8_pyenv/test.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -apt update -# need git and build dependencies https://github.com/pyenv/pyenv/wiki#suggested-build-environment -apt install -y python3 python3-distutils python-is-python3 git \ - build-essential libssl-dev zlib1g-dev \ - libbz2-dev libreadline-dev libsqlite3-dev curl \ - libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev - -curl https://pyenv.run | bash -export PYENV_ROOT="$HOME/.pyenv" -command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH" -eval "$(pyenv init -)" -eval "$(pyenv virtualenv-init -)" - -env PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install 3.8 -pyenv virtualenv 3.8 venv -pyenv activate venv - -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pyenv virtualenv-prefix)/lib - -if [[ -d "${PYTRITON_DIST_DIR}" ]];then - export WHEEL_PATH=$(ls ${PYTRITON_DIST_DIR}/*pytriton*.whl) - pip install "${WHEEL_PATH}[dev]" -else - pip install nvidia-pytriton -fi - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --batch-size 32 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/test.py deleted file mode 100644 index e3640271c844e8be6cb4c922133fdcca8aa5e801..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/test.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""e2e tests inference on add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "ubuntu:22.04", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - batch_size = args.batch_size - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/test.sh deleted file mode 100644 index d721526f80a127c782ed836bb555e74103e206cb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9/test.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -apt update -y -apt install -y software-properties-common - -add-apt-repository ppa:deadsnakes/ppa -y - -apt install -y python3.9 python3.9-dev libpython3.9 python3.9-distutils python3.9-venv python3-pip python-is-python3 \ - build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev \ - libffi-dev curl libbz2-dev pkg-config make - -python3.9 -m venv /opt/venv -source /opt/venv/bin/activate - -if [[ -d "${PYTRITON_DIST_DIR}" ]];then - export WHEEL_PATH=$(ls ${PYTRITON_DIST_DIR}/*pytriton*.whl) - pip install "${WHEEL_PATH}[dev]" -else - pip install nvidia-pytriton -fi - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --batch-size 32 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/test.py deleted file mode 100644 index e3640271c844e8be6cb4c922133fdcca8aa5e801..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/test.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""e2e tests inference on add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "ubuntu:22.04", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument("--batch-size", type=int, default=32, help="Size of single inference batch") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig(grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port()) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - batch_size = args.batch_size - a_batch = np.ones((batch_size, 1), dtype=np.float32) - b_batch = np.ones((batch_size, 1), dtype=np.float32) - - protocol = random.choice(["http", "grpc"]) - protocol_port = getattr(triton_config, f"{protocol}_port") - url = f"{protocol}://localhost:{protocol_port}" - with ModelClient(url, model_spec.name, init_timeout_s=args.init_timeout_s) as client: - result_batch = client.infer_batch(a_batch, b_batch) - np.testing.assert_allclose(result_batch["add"], a_batch + b_batch) - np.testing.assert_allclose(result_batch["sub"], a_batch - b_batch) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/test.sh deleted file mode 100644 index e3355fbe59f99384c9fff9f7547549a92d3df7e1..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_run_on_python_3-9_pyenv/test.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -apt update -# need git and build dependencies https://github.com/pyenv/pyenv/wiki#suggested-build-environment -apt install -y python3 python3-distutils python-is-python3 git \ - build-essential libssl-dev zlib1g-dev \ - libbz2-dev libreadline-dev libsqlite3-dev curl \ - libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev - -curl https://pyenv.run | bash -export PYENV_ROOT="$HOME/.pyenv" -command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH" -eval "$(pyenv init -)" -eval "$(pyenv virtualenv-init -)" - -env PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install 3.9 -pyenv virtualenv 3.9 venv -pyenv activate venv - -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pyenv virtualenv-prefix)/lib - -if [[ -d "${PYTRITON_DIST_DIR}" ]];then - export WHEEL_PATH=$(ls ${PYTRITON_DIST_DIR}/*pytriton*.whl) - pip install "${WHEEL_PATH}[dev]" -else - pip install nvidia-pytriton -fi - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --batch-size 32 \ - --seed 2022101915 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/server.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/server.py deleted file mode 100644 index 9c24d892924e0151399313f43c7e7e2f680a54e6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/server.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Server with add_sub model""" -import argparse -import logging -import random - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) - - -def main(): - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("--http-port", type=int, help="HTTP port on which server listens") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig( - grpc_port=find_free_port(), - http_port=args.http_port or find_free_port(), - metrics_port=find_free_port(), - ) - LOGGER.debug(f"Using {triton_config}") - - with Triton(config=triton_config) as triton: - model_spec = ADD_SUB_PYTHON_MODEL - LOGGER.debug(f"Using {model_spec}") - triton.bind( - model_name=model_spec.name, - infer_func=model_spec.create_infer_fn(), - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.serve() - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/test.py deleted file mode 100644 index 8bfeb8196ab2e82dc4574b476c87ac7e62351018..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/test.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests which: -- starting server -- wait for server to be ready -- sends SEGFAULT signal to server script -- waits till all children processes of server script finishes -""" -import argparse -import logging -import signal -import sys -import time - -from tests.utils import ( # pytype: disable=import-error - DEFAULT_LOG_FORMAT, - ProcessMonitoring, - ScriptThread, - find_free_port, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) - -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import psutil - import pytest - import tritonclient.http - - from pytriton.client import ModelClient - from tests.functional.common.models import ADD_SUB_PYTHON_MODEL - - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Provide verbose logs") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - start_time_s = time.time() - wait_time_s = min(args.timeout_s, 5) - - server_script_module = f"{__package__}.server" - - http_port = find_free_port() - server_cmd = ["python", "-m", server_script_module, "--http-port", str(http_port), "--seed", str(args.seed)] - if args.verbose: - server_cmd.append("--verbose") - - with ScriptThread(server_cmd, name="server") as server_thread: - ModelClient(f"http://localhost:{http_port}", ADD_SUB_PYTHON_MODEL.name).wait_for_model(timeout_s=args.timeout_s) - - monitoring = ProcessMonitoring(server_thread.process.pid) - elapsed_s = time.time() - start_time_s - - assert server_thread.is_alive() - - children_processes = server_thread.process.children(recursive=True) - LOGGER.info(f"Sending SEGFAULT to server script process ({server_thread.process})") - server_thread.process.send_signal(signal.SIGSEGV) - - def _process_running_and_not_zombie(_process): - return _process.is_running() and _process.status() != psutil.STATUS_ZOMBIE - - while ( - server_thread.is_alive() or any(_process_running_and_not_zombie(child) for child in children_processes) - ) and elapsed_s <= args.timeout_s: - time.sleep(wait_time_s) - elapsed_s = time.time() - start_time_s - monitoring.dump_state() - - with pytest.raises(ConnectionRefusedError): - tritonclient.http.InferenceServerClient(f"localhost:{http_port}").is_server_live() - - LOGGER.error("Triton HTTP socket is closed") - - timeout = elapsed_s >= args.timeout_s and ( - server_thread.is_alive() or any(child.is_running() for child in children_processes) - ) - - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - else: - LOGGER.info("All processed terminated") - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/test.sh deleted file mode 100644 index 4e99d4445d20f87eba0a7298e0aa0de650dc5eea..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_triton_close_on_parent_death/test.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -pip install numpy -python -m"${TEST_MODULE}" \ - --timeout-s 60 \ - --seed 2022110811 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test.py deleted file mode 100644 index 6bc50c746b58970c8a98117359a3cfa35b37e758..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test metadata""" - -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test.sh deleted file mode 100644 index 7b0d0a691bd6622c7b767723f6157613cc34c759..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" - -pip install pytest-timeout numpy -pytest -svvv \ - --log-cli-level=DEBUG \ - --log-cli-format='%(asctime)s [%(levelname)s] [%(process)d:%(thread)d] %(name)s:%(lineno)d: %(message)s' \ - --timeout=60 \ - ${THIS_SCRIPT_DIR}/test_pytest.py diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test_pytest.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test_pytest.py deleted file mode 100644 index 0bebf0d252951ae2dbca6b37a7a196eea3501802..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_tritons_cohabitation/test_pytest.py +++ /dev/null @@ -1,180 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of network timeouts with pytest""" - -import logging -import socket -import time -from concurrent.futures import wait -from contextlib import closing - -import numpy as np -import pytest - -from pytriton.client import FuturesModelClient, ModelClient -from pytriton.decorators import batch -from pytriton.model_config import ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig - -_LOGGER = logging.getLogger(__name__) - -_SMALL_TIMEOUT = 0.5 -_LARGE_TIMEOUT = 1.5 -_GARGANTUAN_TIMEOUT = 5.0 - - -def find_free_ports(): - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: - s.bind(("", 0)) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s2: - s2.bind(("", 0)) - s2.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s3: - s3.bind(("", 0)) - s3.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - return { - "http_port": s.getsockname()[1], - "grpc_port": s2.getsockname()[1], - "metrics_port": s3.getsockname()[1], - } - - -def triton_server_builder(ports): - @batch - def _infer_fn(**inputs): # noqa: N803 - _LOGGER.debug(f"Inputs: {inputs}") - sleep_time = inputs["INPUT_1"].squeeze().item() - _LOGGER.info(f"Will sleep {sleep_time}s") - time.sleep(sleep_time) - return_value = { - "OUTPUT_1": inputs["INPUT_1"], - } - _LOGGER.debug(f"Return value {return_value}") - return return_value - - class TritonInstance: - - """Context manager to hold Triton instance and ports""" - - def __init__(self, grpc_port, http_port, metrics_port, model_name, infer_function): - self.grpc_port = grpc_port - self.http_port = http_port - self.metrics_port = metrics_port - self.model_name = model_name - self.config = TritonConfig(http_port=http_port, grpc_port=grpc_port, metrics_port=metrics_port) - self.infer_function = infer_function - self.grpc_url = f"grpc://localhost:{self.grpc_port}" - self.http_url = f"http://localhost:{self.http_port}" - - def __enter__(self): - self.triton = Triton(config=self.config) - _LOGGER.debug(f"Binding {self.model_name} model.") - self.triton.bind( - model_name=self.model_name, - infer_func=self.infer_function, - inputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - outputs=[ - Tensor(dtype=np.float64, shape=(-1, 1)), - ], - config=ModelConfig(max_batch_size=128), - ) - _LOGGER.info("Running Triton server.") - self.triton.run() - return self - - def __exit__(self, exc_type, exc_value, traceback): - _LOGGER.debug("Triton server stopping.") - self.triton.stop() - - def get_model_client(self, init_timeout_s=_GARGANTUAN_TIMEOUT, inference_timeout_s=_GARGANTUAN_TIMEOUT): - _LOGGER.debug( - f"Creating model client with init_timeout_s={init_timeout_s} and inference_timeout_s={inference_timeout_s}" - ) - return ModelClient( - self.http_url, - self.model_name, - init_timeout_s=init_timeout_s, - inference_timeout_s=inference_timeout_s, - ) - - def get_model_futures_client(self, init_timeout_s=_GARGANTUAN_TIMEOUT, inference_timeout_s=_GARGANTUAN_TIMEOUT): - _LOGGER.debug( - f"Creating futures model client with init_timeout_s={init_timeout_s} and inference_timeout_s={inference_timeout_s}" - ) - return FuturesModelClient( - self.http_url, - self.model_name, - init_timeout_s=init_timeout_s, - inference_timeout_s=inference_timeout_s, - ) - - _LOGGER.debug(f"Using ports: {ports}") - with TritonInstance(**ports, model_name="Sleeper", infer_function=_infer_fn) as triton: - yield triton - - -# Define a fixture to 
create and return a Triton server instance -@pytest.fixture(scope="function") -def first_triton_server(): - _LOGGER.debug("Preparing first Triton server.") - ports = find_free_ports() - yield from triton_server_builder(ports) - - -# Define a fixture to create and return a Triton server instance -@pytest.fixture(scope="function") -def second_triton_server(): - _LOGGER.debug("Preparing second Triton server.") - ports = find_free_ports() - yield from triton_server_builder(ports) - - -@pytest.fixture(scope="function") -async def first_async_http_client(first_triton_server): - _LOGGER.debug( - f"Preparing client for {first_triton_server.http_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - yield first_triton_server.get_model_futures_client() - - -@pytest.fixture(scope="function") -async def second_async_http_client(second_triton_server): - _LOGGER.debug( - f"Preparing client for {second_triton_server.http_url} with init timeout {_GARGANTUAN_TIMEOUT} and inference timeout {_SMALL_TIMEOUT}." - ) - yield second_triton_server.get_model_futures_client() - - -@pytest.fixture(scope="session") -def input_sleep_large(): - _LOGGER.debug(f"Preparing input array with value {_LARGE_TIMEOUT}.") - yield np.array([[_LARGE_TIMEOUT]], dtype=np.float64) - - -def test_infer_sample_success_futures(first_async_http_client, second_async_http_client, input_sleep_large): - _LOGGER.debug(f"Testing async grpc_client with input {input_sleep_large}.") - with first_async_http_client as first_client: - with second_async_http_client as second_client: - first_future = first_client.infer_sample(input_sleep_large) - second_future = second_client.infer_sample(input_sleep_large) - done, _not_done = wait([first_future, second_future], timeout=_LARGE_TIMEOUT * 1.3) - assert len(done) == 2 - first = first_future.result() - second = second_future.result() - assert first["OUTPUT_1"] == input_sleep_large - assert second["OUTPUT_1"] == input_sleep_large diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/test.py deleted file mode 100644 index 9d2fb53b40d8fa34e6978c3311cdfec2513d3a25..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/test.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. 
All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests errors passing e2e""" -import argparse -import io -import logging -import random - -from tests.utils import search_warning_on_too_verbose_log_level - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/pytorch:{TEST_CONTAINER_VERSION}-py3", - "platforms": ["amd64", "arm64"], -} - - -def main(): - import numpy as np - - from pytriton.client import ModelClient - from pytriton.client.utils import wait_for_server_ready - from pytriton.decorators import batch - from pytriton.model_config import ModelConfig, Tensor - from pytriton.triton import Triton, TritonConfig - from tests.utils import DEFAULT_LOG_FORMAT, find_free_port - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--init-timeout-s", required=False, default=300, type=float, help="Timeout for server and models initialization" - ) - parser.add_argument( - "--shutdown-timeout-s", - required=False, - default=300, - type=float, - help="Timeout for server to shutdown on PyTritonUnrecoverableError", - ) - parser.add_argument("--seed", type=int, help="PRNG seed", required=False) - parser.add_argument("--verbose", "-v", action="store_true", help="Timeout for test") - args = parser.parse_args() - - log_level = logging.DEBUG if args.verbose else logging.INFO - - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - handler = logging.StreamHandler(io.StringIO()) - handler.setLevel(log_level) - root_logger = logging.getLogger() - root_logger.addHandler(handler) - - LOGGER.debug(f"CLI args: {args}") - - random.seed(args.seed) - - triton_config = TritonConfig( - grpc_port=find_free_port(), http_port=find_free_port(), metrics_port=find_free_port(), log_verbose=4 - ) - LOGGER.debug(f"Using {triton_config}") - - @batch - def infer_fn(**inputs): - return inputs - - with Triton(config=triton_config) as triton: - triton.bind( - model_name="proxy", - infer_func=infer_fn, - inputs=[Tensor(dtype=np.float32, shape=(-1,))], - outputs=[Tensor(dtype=np.float32, shape=(-1,))], - config=ModelConfig(max_batch_size=128), - ) - triton.run() - client = ModelClient(f"http://localhost:{triton_config.http_port}", "Dummy") - wait_for_server_ready(client._general_client, timeout_s=args.init_timeout_s) - - # obtain logs from handler - logs = handler.stream.getvalue() - assert search_warning_on_too_verbose_log_level(logs) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/test.sh deleted file mode 100644 index 8b424f8a390b7fd95d8a9c31307e044af2623b7e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L0_warning_on_too_verbose_level/test.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. 
All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -python -m"${TEST_MODULE}" \ - --init-timeout-s 300 \ - --shutdown-timeout-s 60 \ - --verbose \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/test.py deleted file mode 100644 index cb7224520163a92e9971ed4f0cdf388a8b2e7ffb..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/test.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of nemo_megatron_gpt_multinode example""" -import argparse -import logging -import re -import signal -import sys -import time - -from tests.utils import ( - DEFAULT_LOG_FORMAT, - ScriptThread, - get_current_container_version, - verify_docker_image_in_readme_same_as_tested, -) - -LOGGER = logging.getLogger((__package__ or "main").split(".")[-1]) -METADATA = { - "image_name": "nvcr.io/nvidia/nemo:23.06", -} - - -def verify_client_output(client_output): - expected_pattern = r"1 2 3 4" - output_match = re.search(expected_pattern, client_output, re.MULTILINE) - output_array = output_match.group(0) if output_match else None - if not output_array: - raise ValueError(f"Could not find {expected_pattern} in client output") - else: - LOGGER.info(f'Found "{expected_pattern}" in client output') - - # NeMo model might return neutral or positive sentiment for given task - both are acceptable in test - expected_patterns = [[r"neutral", r"positive"], [r"set the alarm"], [r"seven am"]] - for patterns in expected_patterns: - matches = [re.search(pattern, client_output, re.MULTILINE) for pattern in patterns] - output_array = [match.group(0) if match else None for match in matches] - - if not any(output_array): - raise ValueError( - f'Could not find any of patterns "{", ".join(patterns)}" in client output. Output: {client_output}' - ) - else: - LOGGER.info(f'Found at least one of patterns "{", ".join(patterns)}" in client output') - - -def main(): - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument("--timeout-s", required=False, default=300, type=float, help="Timeout for test") - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - docker_image_with_name = METADATA["image_name"].format(TEST_CONTAINER_VERSION=get_current_container_version()) - verify_docker_image_in_readme_same_as_tested( - "examples/nemo_megatron_gpt_multinode/README.md", docker_image_with_name - ) - - train_cmd = ["bash", "examples/nemo_megatron_gpt_multinode/train_prompt_learning_model.sh"] - with ScriptThread(train_cmd, name="train") as train_thread: - train_thread.join() - - if train_thread.returncode != 0: - raise RuntimeError(f"Train thread returned {train_thread.returncode}") - - start_time = time.time() - elapsed_s = 0 - wait_time_s = min(args.timeout_s, 1) - - server_cmd = [ - "python", - "examples/nemo_megatron_gpt_multinode/server.py", - "--prompt-model-path", - "sentiment_intent_slot_p_tuning.nemo", - "--verbose", - ] - client_cmd = [ - "python", - "examples/nemo_megatron_gpt_multinode/client.py", - "--prompts", - "1 2 3", - "sentiment|It estimates the operating profit to further improve from the third quarter.", - "intent_and_slot|set the alarm to seven am for work", - ] - - with ScriptThread(server_cmd, name="server") as server_thread: - with ScriptThread(client_cmd, name="client") as client_thread: - while server_thread.is_alive() and client_thread.is_alive() and elapsed_s < args.timeout_s: - client_thread.join(timeout=wait_time_s) - elapsed_s = time.time() - start_time - LOGGER.info( - "Interrupting client script process. server.alive=%s client.alive=%s elapsed_s=%s", - server_thread.is_alive(), - client_thread.is_alive(), - elapsed_s, - ) - - elapsed_s = time.time() - start_time - LOGGER.info("Interrupting server script process. 
elapsed_s=%s", elapsed_s) - if server_thread.process: - server_thread.process.send_signal(signal.SIGINT) - - if client_thread.returncode != 0: - raise RuntimeError(f"Client returned {client_thread.returncode}") - if server_thread.returncode not in [0, -2]: # -2 is returned when process finished after receiving SIGINT signal - raise RuntimeError(f"Server returned {server_thread.returncode}") - - timeout = elapsed_s >= args.timeout_s and client_thread.is_alive() and server_thread.is_alive() - if timeout: - LOGGER.error(f"Timeout occurred (timeout_s={args.timeout_s})") - sys.exit(-2) - - verify_client_output(client_thread.output) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/test.sh deleted file mode 100644 index cc1aaec61042e39b9c81392abbc49b0afaca596d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_example_nemo_megatron_gpt_prompt_learning/test.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_DIR="$(realpath --relative-to="${PWD}" "$(dirname "$0")")" -TEST_MODULE="${THIS_SCRIPT_DIR//\//.}.test" - -python -m"${TEST_MODULE}" \ - --timeout-s 3000 diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/test.py deleted file mode 100644 index 892fc7074c2eefeb33fa6dfb1a4567cb5b61690c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/test.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Runs short (50min) inference session over NLP model -""" -import argparse -import logging -import random - -logger = logging.getLogger(__package__) -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", - "shared_memory_size_mb": 512, -} - - -def main(): - from tests.functional.common.tests.client_stress import futures_stress_test - from tests.utils import DEFAULT_LOG_FORMAT - - parser = argparse.ArgumentParser(description="HuggigFace DistillBERT functional test.") - parser.add_argument( - "--test-time-s", - required=False, - default=3000, - type=int, - help="Time for how long the test should be run.", - ) - parser.add_argument( - "--init-timeout-s", - required=False, - default=300, - type=int, - help="Timeout for server and models initialization", - ) - parser.add_argument( - "--batch-size", - required=False, - default=16, - type=int, - help="Maximal batch size used for model deployment", - ) - parser.add_argument( - "--seed", - type=int, - help="PRNG seed", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - help="Provide verbose logs", - ) - args = parser.parse_args() - - random.seed(args.seed) - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - futures_stress_test( - test_time_s=args.test_time_s, - init_timeout_s=args.init_timeout_s, - batch_size=args.batch_size, - verbose=args.verbose, - seed=args.seed, - ) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/test.sh deleted file mode 100644 index 137ed2985bc094dfa2c60af74238146053029cb7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_futures_client_stress/test.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -python -m"${TEST_MODULE}" \ - --test-time-s 30 \ - --init-timeout-s 200 \ - --batch-size 16 \ - --seed 20221019 \ diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/test.py deleted file mode 100644 index a9607b96083391d637c9e82ab645f3ac00a08900..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/test.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" -Runs short (50min) inference session over NLP model -""" -import argparse -import logging -import random - -logger = logging.getLogger(__package__) -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", - "shared_memory_size_mb": 512, -} - - -def main(): - from tests.functional.common.tests.hf_nlp_distilbert import huggingface_distilbert - from tests.utils import DEFAULT_LOG_FORMAT - - parser = argparse.ArgumentParser(description="HuggigFace DistillBERT functional test.") - parser.add_argument( - "--test-time-s", - required=False, - default=3000, - type=int, - help="Time for how long the test should be run.", - ) - parser.add_argument( - "--init-timeout-s", - required=False, - default=300, - type=int, - help="Timeout for server and models initialization", - ) - parser.add_argument( - "--batch-size", - required=False, - default=16, - type=int, - help="Maximal batch size used for model deployment", - ) - parser.add_argument( - "--sequence-length", - required=False, - default=128, - type=int, - help="Maximal sequence length", - ) - parser.add_argument( - "--seed", - type=int, - help="PRNG seed", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - help="Provide verbose logs", - ) - args = parser.parse_args() - - random.seed(args.seed) - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - huggingface_distilbert( - test_time_s=args.test_time_s, - init_timeout_s=args.init_timeout_s, - batch_size=args.batch_size, - sequence_length=args.sequence_length, - verbose=args.verbose, - ) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/test.sh deleted file mode 100644 index d48689b60679b5b175003c0d5cddb3c16c515bc9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_hf_nlp_distilbert/test.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -pip install transformers datasets --upgrade - -python -m"${TEST_MODULE}" \ - --test-time-s 3000 \ - --init-timeout-s 200 \ - --batch-size 16 \ - --sequence-length 128 \ - --seed 20221019 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/test.py deleted file mode 100644 index 09e9175ab4b8df04925b1a45f368099e6e552c2d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/test.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" -Runs short (50min) inference session over image detector model -""" -import argparse -import logging -import random - -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", - "shared_memory_size_mb": 768, -} - - -def main(): - from tests.functional.common.tests.tfhub_image_detection import tfhub_image_detection - from tests.utils import DEFAULT_LOG_FORMAT - - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument( - "--test-time-s", - required=False, - default=3000, - type=int, - help="Time for how long the test should be run.", - ) - parser.add_argument( - "--init-timeout-s", - required=False, - default=300, - type=int, - help="Timeout for server and models initialization", - ) - parser.add_argument( - "--batch-size", - required=False, - default=128, - type=int, - help="Maximal batch size used for model deployment", - ) - parser.add_argument( - "--seed", - type=int, - help="PRNG seed", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - help="Provide verbose logs", - ) - args = parser.parse_args() - - random.seed(args.seed) - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - tfhub_image_detection( - test_time_s=args.test_time_s, - init_timeout_s=args.init_timeout_s, - batch_size=args.batch_size, - verbose=args.verbose, - ) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/test.sh deleted file mode 100644 index 0e3c5a3a54d180ed3596e2c39e3ab2ccef63ab3e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L1_tfhub_image_detection/test.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -python -m"${TEST_MODULE}" \ - --test-time-s 3000 \ - --init-timeout-s 200 \ - --batch-size 128 \ - --seed 20221019 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/test.py deleted file mode 100644 index ce98edc4e1e187c23099bfdfdc602a8f14af7666..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/test.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Runs long inference session with AsyncioModelClient over identity model -""" - - -import argparse -import asyncio -import logging -import random - -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", - "shared_memory_size_mb": 512, -} - - -async def main(): - from tests.functional.common.tests.client_stress import asyncio_stress_test - from tests.utils import TestMonitoringContext - - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument( - "--test-time-s", - required=False, - default=3000, - type=int, - help="Time for how long the test should be run.", - ) - parser.add_argument( - "--init-timeout-s", - required=False, - default=300, - type=int, - help="Timeout for server and models initialization", - ) - parser.add_argument( - "--batch-size", - required=False, - default=16, - type=int, - help="Maximal batch size used for model deployment", - ) - parser.add_argument( - "--seed", - type=int, - help="PRNG seed", - required=False, - ) - TestMonitoringContext.extend_args(parser) - args = parser.parse_args() - - random.seed(args.seed) - - logging.captureWarnings(True) - - with TestMonitoringContext(args): - await asyncio_stress_test( - test_time_s=args.test_time_s, - init_timeout_s=args.init_timeout_s, - batch_size=args.batch_size, - verbose=args.verbose, - seed=args.seed, - ) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/test.sh deleted file mode 100644 index ffdd8c3bc513a2698fa04eacbaecd8334c82580c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_asyncio_client_long_test/test.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" -LOGS_DIR=${LOGS_DIR:-$PWD} -# Create the logs folder if it does not exist -mkdir -p "$LOGS_DIR" - -# Set the log path with the date and time -LOG_PATH="$LOGS_DIR/log_$(date '+%Y-%m-%d_%H-%M-%S').txt" - -python -m"${TEST_MODULE}" \ - --test-time-s 36000 \ - --init-timeout-s 200 \ - --batch-size 16 \ - --seed 20221019 \ - --enable-fault-handler \ - --process-monitoring-interval 600 \ - --log-path "${LOG_PATH}" \ - --compress-logs diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/__init__.py deleted file mode 100644 index 44d6e3348407d341b0e6de72acb133e09b445df7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/test.py deleted file mode 100644 index 001ccb8ce226a92d9cfceceeedf570632a82309f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/test.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" -Runs long inference session with FuturesModelClient over identity model -""" - - -import argparse -import logging -import random - -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", - "shared_memory_size_mb": 512, -} - - -def main(): - from tests.functional.common.tests.client_stress import futures_stress_test - from tests.utils import TestMonitoringContext - - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument( - "--test-time-s", - required=False, - default=3000, - type=int, - help="Time for how long the test should be run.", - ) - parser.add_argument( - "--init-timeout-s", - required=False, - default=300, - type=int, - help="Timeout for server and models initialization", - ) - parser.add_argument( - "--batch-size", - required=False, - default=16, - type=int, - help="Maximal batch size used for model deployment", - ) - parser.add_argument( - "--seed", - type=int, - help="PRNG seed", - required=False, - ) - TestMonitoringContext.extend_args(parser) - args = parser.parse_args() - - logging.captureWarnings(True) - - random.seed(args.seed) - with TestMonitoringContext(args): - futures_stress_test( - test_time_s=args.test_time_s, - init_timeout_s=args.init_timeout_s, - batch_size=args.batch_size, - verbose=args.verbose, - seed=args.seed, - ) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/test.sh deleted file mode 100644 index 9475c569954dc9b85dc843c5d2faaa6133e51641..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_futures_client_long_test/test.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" -LOGS_DIR=${LOGS_DIR:-$PWD} -# Create the logs folder if it does not exist -mkdir -p "$LOGS_DIR" - -# Set the log path with the date and time -LOG_PATH="$LOGS_DIR/log_$(date '+%Y-%m-%d_%H-%M-%S').txt" - -python -m"${TEST_MODULE}" \ - --test-time-s 36000 \ - --init-timeout-s 200 \ - --batch-size 16 \ - --seed 20221019 \ - --enable-fault-handler \ - --process-monitoring-interval 600 \ - --log-path "${LOG_PATH}" \ - --compress-logs \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/test.py deleted file mode 100644 index 1d151b5932a7c61a9957e5d7547cf75502ef1a54..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/test.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Runs longer (10h) inference session over NLP model -""" -import argparse -import logging -import random - -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", - "shared_memory_size_mb": 512, -} - - -def main(): - from tests.functional.common.tests.hf_nlp_distilbert import huggingface_distilbert - from tests.utils import TestMonitoringContext - - parser = argparse.ArgumentParser(description="HuggingFace DistilBERT functional test.") - parser.add_argument( - "--test-time-s", - required=False, - default=3000, - type=int, - help="Time for how long the test should be run.", - ) - parser.add_argument( - "--init-timeout-s", - required=False, - default=300, - type=int, - help="Timeout for server and models initialization", - ) - parser.add_argument( - "--batch-size", - required=False, - default=16, - type=int, - help="Maximal batch size used for model deployment", - ) - parser.add_argument( - "--sequence-length", - required=False, - default=128, - type=int, - help="Maximal sequence length", - ) - parser.add_argument( - "--seed", - type=int, - help="PRNG seed", - required=False, - ) - TestMonitoringContext.extend_args(parser) - args = parser.parse_args() - - random.seed(args.seed) - - logging.captureWarnings(True) - - with TestMonitoringContext(args): - huggingface_distilbert( - test_time_s=args.test_time_s, - init_timeout_s=args.init_timeout_s, - batch_size=args.batch_size, - verbose=args.verbose, - sequence_length=args.sequence_length, - ) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/test.sh deleted file mode 100644 index 150b4df82811505763ae9cc276462ad4de29499e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_hf_nlp_distilbert/test.sh +++ /dev/null @@ -1,38 +0,0 @@ 
-#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" -LOGS_DIR=${LOGS_DIR:-$PWD} -# Create the logs folder if it does not exist -mkdir -p "$LOGS_DIR" - -# Set the log path with the date and time -LOG_PATH="$LOGS_DIR/log_$(date '+%Y-%m-%d_%H-%M-%S').txt" - -pip install transformers datasets --upgrade - -python -m"${TEST_MODULE}" \ - --test-time-s 36000 \ - --init-timeout-s 300 \ - --batch-size 16 \ - --sequence-length 128 \ - --seed 20221019 \ - --enable-fault-handler \ - --process-monitoring-interval 600 \ - --log-path "${LOG_PATH}" \ - --compress-logs diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/test.py b/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/test.py deleted file mode 100644 index 76f509935963bd5089002bde9147f787a71d0253..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/test.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" -Runs longer (10h) inference session over image detector model -""" -import argparse -import logging -import random - -METADATA = { - "image_name": "nvcr.io/nvidia/tensorflow:{TEST_CONTAINER_VERSION}-tf2-py3", - "shared_memory_size_mb": 768, -} - - -def main(): - from tests.functional.common.tests.tfhub_image_detection import tfhub_image_detection - from tests.utils import DEFAULT_LOG_FORMAT - - parser = argparse.ArgumentParser(description="short_description") - parser.add_argument( - "--test-time-s", - required=False, - default=36000, - type=int, - help="Time for how long the test should be run.", - ) - parser.add_argument( - "--init-timeout-s", - required=False, - default=300, - type=int, - help="Timeout for server and models initialization", - ) - parser.add_argument( - "--batch-size", - required=False, - default=128, - type=int, - help="Maximal batch size used for model deployment", - ) - parser.add_argument( - "--seed", - type=int, - help="PRNG seed", - required=False, - ) - parser.add_argument( - "--verbose", - action="store_true", - help="Provide verbose logs", - ) - args = parser.parse_args() - - random.seed(args.seed) - - log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=log_level, format=DEFAULT_LOG_FORMAT) - logging.captureWarnings(True) - - tfhub_image_detection( - test_time_s=args.test_time_s, - init_timeout_s=args.init_timeout_s, - batch_size=args.batch_size, - verbose=args.verbose, - ) - - -if __name__ == "__main__": - main() diff --git a/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/test.sh b/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/test.sh deleted file mode 100644 index 19c2bef3e2c2fd62fc35d512d39ee4f2cf9a476c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/L2_tfhub_image_detection/test.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -xe - -THIS_SCRIPT_PATH="$(realpath --relative-to="$(pwd)" "$0")" -TEST_MODULE="$(dirname "${THIS_SCRIPT_PATH}"|sed 's/\//./g').test" - -python -m"${TEST_MODULE}" \ - --test-time-s 36000 \ - --init-timeout-s 300 \ - --batch-size 128 \ - --seed 20221019 \ - --verbose diff --git a/stf/stf-api-alternative/pytriton/tests/functional/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/common/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/common/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/common/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/functional/common/datasets.py b/stf/stf-api-alternative/pytriton/tests/functional/common/datasets.py deleted file mode 100644 index f847d4c7e0ba17e0ed39115c1fe4b1d1b95ea5e8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/common/datasets.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import dataclasses -import enum -import subprocess -from typing import Callable, Optional - - -class Framework(enum.Enum): - TENSORFLOW = "tensorflow" - PYTORCH = "pytorch" - - -@dataclasses.dataclass(frozen=True) -class DatasetSpec: - framework: Optional[Framework] - create_dataset_fn: Callable - - -COCO_LABELS = { - # 0: 'background', - 1: "person", - 2: "bicycle", - 3: "car", - 4: "motorcycle", - 5: "airplane", - 6: "bus", - 7: "train", - 8: "truck", - 9: "boat", - 10: "traffic light", - 11: "fire hydrant", - 13: "stop sign", - 14: "parking meter", - 15: "bench", - 16: "bird", - 17: "cat", - 18: "dog", - 19: "horse", - 20: "sheep", - 21: "cow", - 22: "elephant", - 23: "bear", - 24: "zebra", - 25: "giraffe", - 27: "backpack", - 28: "umbrella", - 31: "handbag", - 32: "tie", - 33: "suitcase", - 34: "frisbee", - 35: "skis", - 36: "snowboard", - 37: "sports ball", - 38: "kite", - 39: "baseball bat", - 40: "baseball glove", - 41: "skateboard", - 42: "surfboard", - 43: "tennis racket", - 44: "bottle", - 46: "wine glass", - 47: "cup", - 48: "fork", - 49: "knife", - 50: "spoon", - 51: "bowl", - 52: "banana", - 53: "apple", - 54: "sandwich", - 55: "orange", - 56: "broccoli", - 57: "carrot", - 58: "hot dog", - 59: "pizza", - 60: "donut", - 61: "cake", - 62: "chair", - 63: "couch", - 64: "potted plant", - 65: "bed", - 67: "dining table", - 70: "toilet", - 72: "tv", - 73: "laptop", - 74: "mouse", - 75: "remote", - 76: "keyboard", - 77: "cell phone", - 78: "microwave", - 79: "oven", - 80: "toaster", - 81: "sink", - 82: "refrigerator", - 84: "book", - 85: "clock", - 86: "vase", - 87: "scissors", - 88: "teddy bear", - 89: "hair drier", - 90: "toothbrush", -} - - -def _create_tfds_coco2017_validation(batch_size: Optional[int] = None) -> Callable: - subprocess.run(["pip", "install", "--upgrade", "tensorflow-datasets"], check=True) - - import tensorflow_datasets as tfds # pytype: disable=import-error - - return tfds.load("coco/2017", split="validation", as_supervised=True, with_info=True, batch_size=batch_size) - - -TFDS_COCO2017_VALIDATION_DATASET = DatasetSpec( - framework=Framework.TENSORFLOW, - create_dataset_fn=_create_tfds_coco2017_validation, -) - - -def _create_tfds_tf_flowers(batch_size: Optional[int] = None): - subprocess.run(["pip", "install", "--upgrade", "tensorflow-datasets"], check=True) - - import tensorflow_datasets as tfds # pytype: disable=import-error - - return tfds.load("tf_flowers", as_supervised=True, with_info=True, batch_size=batch_size) - - -TFDS_TF_FLOWERS_DATASET = DatasetSpec( - framework=Framework.TENSORFLOW, - create_dataset_fn=_create_tfds_tf_flowers, -) - - -DATASETS_CATALOGUE = [TFDS_COCO2017_VALIDATION_DATASET, TFDS_TF_FLOWERS_DATASET] diff --git a/stf/stf-api-alternative/pytriton/tests/functional/common/models.py b/stf/stf-api-alternative/pytriton/tests/functional/common/models.py deleted file mode 100644 index fe89f0e6dfa724d3376fecd8ba5fe90607a67fd6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/common/models.py +++ /dev/null @@ -1,212 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import dataclasses -import enum -import subprocess -from typing import Callable, Optional, Sequence - -import numpy as np - -from pytriton.decorators import batch, sample -from pytriton.model_config import ModelConfig, Tensor - - -class Framework(enum.Enum): - TENSORFLOW = "tensorflow" - PYTORCH = "pytorch" - - -@dataclasses.dataclass(frozen=True) -class TestModelSpec: - name: str - framework: Optional[Framework] - create_infer_fn: Callable[..., Callable] - inputs: Sequence[Tensor] - outputs: Sequence[Tensor] - model_config: ModelConfig - - -def _create_add_sub_fn() -> Callable: - @batch - def _add_sub(**inputs): - a_batch, b_batch = inputs.values() - add_batch = a_batch + b_batch - sub_batch = a_batch - b_batch - return {"add": add_batch, "sub": sub_batch} - - return _add_sub - - -ADD_SUB_PYTHON_MODEL = TestModelSpec( - name="AddSub", - framework=None, - create_infer_fn=_create_add_sub_fn, - inputs=( - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ), - outputs=( - Tensor(name="add", dtype=np.float32, shape=(-1,)), - Tensor(name="sub", dtype=np.float32, shape=(-1,)), - ), - model_config=ModelConfig(max_batch_size=128), -) - - -def _create_identity_fn() -> Callable: - @batch - def _identity(**inputs): - (a_batch,) = inputs.values() - return {"identity": a_batch} - - return _identity - - -IDENTITY_PYTHON_MODEL = TestModelSpec( - name="Identity", - framework=None, - create_infer_fn=_create_identity_fn, - inputs=(Tensor(dtype=np.float32, shape=(-1,)),), - outputs=(Tensor(name="identity", dtype=np.float32, shape=(-1,)),), - model_config=ModelConfig(max_batch_size=128), -) - - -def _create_tfhub_tensorflow_efficientdet_lite0_detection_fn() -> Callable: - subprocess.run(["pip", "install", "tensorflow_hub"], check=True) - - import tensorflow_hub as hub # pytype: disable=import-error - - detector = hub.load("https://tfhub.dev/tensorflow/efficientdet/lite0/detection/1") - - @batch - def _tfhub_tensorflow_efficientdet_lite0_detection(image: np.ndarray): - boxes, scores, classes, num_detections = detector(image) - return { - "boxes": boxes.numpy(), - "scores": scores.numpy(), - "classes": classes.numpy(), - "num_detections": num_detections.numpy(), - } - - return _tfhub_tensorflow_efficientdet_lite0_detection - - -EFFICIENTDET_DETECTION_TF_MODEL = TestModelSpec( - name="EfficientDetDetector", - framework=Framework.TENSORFLOW, - create_infer_fn=_create_tfhub_tensorflow_efficientdet_lite0_detection_fn, - inputs=(Tensor(name="image", dtype=np.uint8, shape=(-1, -1, 3)),), - outputs=( - Tensor( - name="boxes", - dtype=np.float32, - shape=(-1, 4), - ), - Tensor( - name="scores", - dtype=np.float32, - shape=(-1,), - ), - Tensor( - name="classes", - dtype=np.int32, - shape=(-1,), - ), - Tensor(name="num_detections", dtype=np.int32, shape=(1,)), - ), - model_config=ModelConfig(max_batch_size=128), -) - - -def _create_hf_pytorch_bart_fn() -> Callable: - subprocess.run(["pip", "install", "transformers"], check=True) - - import transformers # pytype: disable=import-error - - classifier = transformers.pipeline("zero-shot-classification", 
model="facebook/bart-large-mnli", device=0) - - @sample - def _hf_pytorch_bart_fn(sequence: np.ndarray, labels: np.ndarray): - sequence = sequence[0].decode("utf-8") - labels = [label.decode("utf-8") for label in labels] - classification_result = classifier(sequence, labels) - scores_batch = np.array(classification_result["scores"], dtype=np.float32) - return {"scores": scores_batch} - - return _hf_pytorch_bart_fn - - -BART_CLASSIFIER_PYTORCH_MODEL = TestModelSpec( - name="BARTClassifier", - framework=Framework.PYTORCH, - create_infer_fn=_create_hf_pytorch_bart_fn, - inputs=(Tensor(name="sequence", dtype=object, shape=(-1,)), Tensor(name="labels", dtype=object, shape=(-1,))), - outputs=(Tensor(name="scores", dtype=np.float32, shape=(-1,)),), - model_config=ModelConfig(batching=False), -) - - -def _create_hf_jax_bert_fn() -> Callable: - subprocess.run( - [ - "pip", - "install", - "--upgrade", - "jax[cuda12_pip]", - "-f", - "https://storage.googleapis.com/jax-releases/jax_cuda_releases.html", - ], - check=True, - ) - subprocess.run(["pip", "install", "transformers", "flax"], check=True) - - import transformers # pytype: disable=import-error - - tokenizer = transformers.BertTokenizer.from_pretrained("bert-base-uncased") - model = transformers.FlaxBertModel.from_pretrained("bert-base-uncased") - - @batch - def _infer_fn(**inputs: np.ndarray): - (sequence_batch,) = inputs.values() - sequence_batch = sequence_batch.tolist() - last_hidden_states = [] - for sequence_item in sequence_batch: - # index 0 needed to extract str from numpy array and decode utf-8 - sequence_as_str = sequence_item[0].decode("utf-8") - tokenized_sequence = tokenizer(sequence_as_str, return_tensors="jax") - results = model(**tokenized_sequence) - last_hidden_states.append(results.last_hidden_state) - last_hidden_states = np.array(last_hidden_states, dtype=np.float32) - return [last_hidden_states] - - return _infer_fn - - -BERT_JAX_MODEL = TestModelSpec( - name="BERTJax", - framework=Framework.TENSORFLOW, - create_infer_fn=_create_hf_jax_bert_fn, - inputs=(Tensor(name="sequence", dtype=object, shape=(1,)),), - outputs=(Tensor(name="last_hidden_state", dtype=np.float32, shape=(-1,)),), - model_config=ModelConfig(max_batch_size=128), -) - -MODELS_CATALOGUE = [ - ADD_SUB_PYTHON_MODEL, - IDENTITY_PYTHON_MODEL, - EFFICIENTDET_DETECTION_TF_MODEL, - BART_CLASSIFIER_PYTORCH_MODEL, - BERT_JAX_MODEL, -] diff --git a/stf/stf-api-alternative/pytriton/tests/functional/common/tests/__init__.py b/stf/stf-api-alternative/pytriton/tests/functional/common/tests/__init__.py deleted file mode 100644 index d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/common/tests/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/stf/stf-api-alternative/pytriton/tests/functional/common/tests/client_stress.py b/stf/stf-api-alternative/pytriton/tests/functional/common/tests/client_stress.py deleted file mode 100644 index 52c76d251810f6d3276600cef979e5dd6284e979..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/common/tests/client_stress.py +++ /dev/null @@ -1,291 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Runs inference session over NLP model -""" - -import logging -import pathlib -import tempfile -import textwrap -from concurrent.futures import FIRST_COMPLETED -from concurrent.futures import wait as futures_wait -from typing import Callable - -import numpy as np - -from pytriton.client import AsyncioModelClient, FuturesModelClient -from pytriton.decorators import batch -from pytriton.model_config import DynamicBatcher, ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig -from tests.functional.common.models import Framework, TestModelSpec -from tests.utils import find_free_port - -logger = logging.getLogger(__package__) - -VOCABULARY_SIZE = 30522 -VALID_TOKEN_ID = 5 -MIN_SEQUENCE_LENGTH = 20 -MAX_SEQUENCE_LENGTH = 128 - - -def futures_stress_test(test_time_s: int, init_timeout_s: int, batch_size: int, seed: int, verbose: bool): - model_name = "distilbert-base-uncased" - - model_spec = _model_spec() - - import random - - def requests_generator(): - while True: - inputs_len = random.randint(MIN_SEQUENCE_LENGTH, MAX_SEQUENCE_LENGTH) - input_ids = ( - np.zeros( - ( - 1, - inputs_len, - ), - dtype=np.int64, - ) - + 5 - ) - attention_mask = np.ones( - ( - 1, - inputs_len, - ), - dtype=np.int64, - ) - yield {"input_ids": input_ids, "attention_mask": attention_mask} - - requests = requests_generator() - - logger.info("starting server") - - infer_fn = model_spec.create_infer_fn(model_name=model_name) - with tempfile.TemporaryDirectory() as temp_dir: - triton_log_path = pathlib.Path(temp_dir) / "triton.log" - try: - triton_config = TritonConfig( - grpc_port=find_free_port(), - http_port=find_free_port(), - metrics_port=find_free_port(), - log_verbose=int(verbose), - log_file=triton_log_path, - ) - with Triton(config=triton_config) as triton: - triton.bind( - model_name=model_spec.name, - infer_func=infer_fn, - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - # Set to infinity - previous_time_left = float("inf") - previous_number_of_requests = 0 - - # Send requests - url = f"http://localhost:{triton_config.http_port}" - with FuturesModelClient(url, model_spec.name, max_workers=batch_size) as client: - # Wait for model - client.wait_for_model(init_timeout_s).result() - - import time - - should_stop_at_s = time.time() + test_time_s - - number_of_processed_requests = 0 - - not_done = {*()} - for request in requests: - result_future = client.infer_batch(**request) - not_done.add(result_future) - if len(not_done) > 
batch_size: - done, not_done = futures_wait(not_done, return_when=FIRST_COMPLETED) - if len(done) > 0: - future = done.pop() - result = future.result() - number_of_processed_requests += len(done) - if number_of_processed_requests - previous_number_of_requests > 10: - previous_number_of_requests = number_of_processed_requests - time_left_s = max(should_stop_at_s - time.time(), 0.0) - logger.info( - f"Processed {number_of_processed_requests} batches time left: {time_left_s:0.1f}s \n." - f"Result: {len(result)}." - ) - time_left_s = max(should_stop_at_s - time.time(), 0.0) - if previous_time_left - time_left_s > 10: - previous_time_left = time_left_s - logger.info(f"Time left: {time_left_s:0.1f}s") - if time_left_s <= 0: - break - logger.info(f"Test finished. Processed {number_of_processed_requests} requests") - - finally: - if triton_log_path.exists(): - logger.debug("-" * 64) - server_logs = triton_log_path.read_text(errors="replace") - server_logs = "--- triton logs:\n\n" + textwrap.indent(server_logs, prefix=" " * 8) - logger.debug(server_logs) - logger.info("Test finished") - - -async def asyncio_stress_test(test_time_s: int, init_timeout_s: int, batch_size: int, seed: int, verbose: bool): - model_name = "distilbert-base-uncased" - - model_spec = _model_spec() - - import random - - def requests_generator(): - while True: - inputs_len = random.randint(MIN_SEQUENCE_LENGTH, MAX_SEQUENCE_LENGTH) - input_ids = ( - np.zeros( - ( - 1, - inputs_len, - ), - dtype=np.int64, - ) - + 5 - ) - attention_mask = np.ones( - ( - 1, - inputs_len, - ), - dtype=np.int64, - ) - yield {"input_ids": input_ids, "attention_mask": attention_mask} - - requests = requests_generator() - - logger.info("starting server") - - infer_fn = model_spec.create_infer_fn(model_name=model_name) - with tempfile.TemporaryDirectory() as temp_dir: - triton_log_path = pathlib.Path(temp_dir) / "triton.log" - try: - triton_config = TritonConfig( - grpc_port=find_free_port(), - http_port=find_free_port(), - metrics_port=find_free_port(), - log_verbose=int(verbose), - log_file=triton_log_path, - ) - with Triton(config=triton_config) as triton: - triton.bind( - model_name=model_spec.name, - infer_func=infer_fn, - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - # Set to infinity - previous_time_left = float("inf") - previous_number_of_requests = 0 - - # Send requests - url = f"http://localhost:{triton_config.http_port}" - async with AsyncioModelClient(url, model_spec.name) as client: - # Wait for model - await client.wait_for_model(init_timeout_s) - - import asyncio - import time - - should_stop_at_s = time.time() + test_time_s - - number_of_processed_requests = 0 - - not_done = {*()} - for request in requests: - result_future = client.infer_batch(**request) - not_done.add(result_future) - if len(not_done) > batch_size: - done, not_done = await asyncio.wait(not_done, return_when=asyncio.FIRST_COMPLETED) - if len(done) > 0: - future = done.pop() - result = await future - number_of_processed_requests += len(done) - if number_of_processed_requests - previous_number_of_requests > 10: - previous_number_of_requests = number_of_processed_requests - time_left_s = max(should_stop_at_s - time.time(), 0.0) - logger.info( - f"Processed {number_of_processed_requests} batches time left: {time_left_s:0.1f}s \n." - f"Result: {len(result)}." 
- ) - time_left_s = max(should_stop_at_s - time.time(), 0.0) - if previous_time_left - time_left_s > 10: - previous_time_left = time_left_s - logger.info(f"Time left: {time_left_s:0.1f}s") - if time_left_s <= 0: - done, not_done = await asyncio.wait(not_done, return_when=asyncio.ALL_COMPLETED) - break - logger.info(f"Test finished. Processed {number_of_processed_requests} requests") - - finally: - if triton_log_path.exists(): - logger.debug("-" * 64) - server_logs = triton_log_path.read_text(errors="replace") - server_logs = "--- triton logs:\n\n" + textwrap.indent(server_logs, prefix=" " * 8) - logger.debug(server_logs) - logger.info("Test finished") - - -def _create_fake_bert_fn(model_name: str) -> Callable: - @batch - def _infer_fn(input_ids, attention_mask): - assert input_ids.shape == attention_mask.shape - import random - - outputs_len = random.randint(20, 128) - result = np.zeros([input_ids.shape[0], outputs_len, VOCABULARY_SIZE], dtype=np.float32) - logger.debug(f"input_ids: {input_ids.shape}") - logger.debug(f"attention_mask: {attention_mask.shape}") - return {"logits": result} - - return _infer_fn - - -def _model_spec() -> TestModelSpec: - model_spec = TestModelSpec( - name="FakeBert", - framework=Framework.TENSORFLOW, - create_infer_fn=_create_fake_bert_fn, - inputs=( - Tensor(name="input_ids", dtype=np.int64, shape=(-1,)), - Tensor(name="attention_mask", dtype=np.int64, shape=(-1,)), - ), - outputs=( - Tensor( - name="logits", - dtype=np.float32, - shape=(-1, -1), - ), - ), - model_config=ModelConfig( - max_batch_size=16, - batcher=DynamicBatcher( - max_queue_delay_microseconds=5000, - ), - ), - ) - return model_spec diff --git a/stf/stf-api-alternative/pytriton/tests/functional/common/tests/hf_nlp_distilbert.py b/stf/stf-api-alternative/pytriton/tests/functional/common/tests/hf_nlp_distilbert.py deleted file mode 100644 index 1bde5365fbe2ce2dd87a773823fb919b6eb92f31..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/common/tests/hf_nlp_distilbert.py +++ /dev/null @@ -1,205 +0,0 @@ -# Copyright (c) 2022-23, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-""" -Runs inference session over NLP model -""" - -import logging -import pathlib -import tempfile -import textwrap -from concurrent.futures import FIRST_COMPLETED, wait -from typing import Callable, List - -import numpy as np - -from pytriton.client import FuturesModelClient -from pytriton.decorators import batch -from pytriton.model_config import DynamicBatcher, ModelConfig, Tensor -from pytriton.triton import Triton, TritonConfig -from tests.functional.common.models import Framework, TestModelSpec -from tests.utils import find_free_port - -logger = logging.getLogger(__package__) - - -def huggingface_distilbert(test_time_s: int, init_timeout_s: int, batch_size: int, sequence_length: int, verbose: bool): - import tensorflow # pytype: disable=import-error - - gpus = tensorflow.config.experimental.list_physical_devices("GPU") - for gpu in gpus: - tensorflow.config.experimental.set_memory_growth(gpu, True) - - model_name = "distilbert-base-uncased" - - model_spec = _model_spec() - - logger.debug("generating dataset") - dataset = _dataset( - model_name=model_name, - dataset_name="imdb", - sequence_length=sequence_length, - input_names=[inpt.name for inpt in model_spec.inputs], - batch_size=1, - ) - - def requests_generator(): - for data_tensor in dataset: - input_ids = data_tensor["input_ids"].numpy() - attention_mask = data_tensor["attention_mask"].numpy() - for _ in range(batch_size): - yield {"input_ids": input_ids, "attention_mask": attention_mask} - - requests = list(requests_generator()) - - logger.debug("data generated") - - infer_fn = model_spec.create_infer_fn(model_name=model_name) - with tempfile.TemporaryDirectory() as temp_dir: - triton_log_path = pathlib.Path(temp_dir) / "triton.log" - try: - triton_config = TritonConfig( - grpc_port=find_free_port(), - http_port=find_free_port(), - metrics_port=find_free_port(), - log_verbose=int(verbose), - log_file=triton_log_path, - ) - with Triton(config=triton_config) as triton: - triton.bind( - model_name=model_spec.name, - infer_func=infer_fn, - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - logger.debug("Triton server started") - - # Send requests - url = f"http://localhost:{triton_config.http_port}" - with FuturesModelClient(url, model_spec.name, max_workers=batch_size) as client: - # Wait for model - client.wait_for_model(init_timeout_s).result() - - import time - - should_stop_at_s = time.time() + test_time_s - - number_of_processed_requests = 0 - - not_done = {*()} - for request in requests: - result_future = client.infer_batch(**request) - not_done.add(result_future) - if len(not_done) > batch_size: - done, not_done = wait(not_done, return_when=FIRST_COMPLETED) - if len(done) > 0: - future = done.pop() - result = future.result() - number_of_processed_requests += len(done) - if number_of_processed_requests > 0 and number_of_processed_requests % 10 == 0: - time_left_s = max(should_stop_at_s - time.time(), 0.0) - logger.debug( - f"Processed {number_of_processed_requests} batches time left: {time_left_s:0.1f}s \n." - f"Result: {len(result)}." 
- ) - time_left_s = max(should_stop_at_s - time.time(), 0.0) - if time_left_s <= 0: - break - logger.debug("Test finished") - - finally: - if triton_log_path.exists(): - logger.debug("-" * 64) - server_logs = triton_log_path.read_text(errors="replace") - server_logs = "--- triton logs:\n\n" + textwrap.indent(server_logs, prefix=" " * 8) - logger.debug(server_logs) - - -def _create_hf_tensorflow_distilbert_base_uncased_fn(model_name: str) -> Callable: - import tensorflow as tf - from transformers.models.distilbert.modeling_tf_distilbert import ( # pytype: disable=import-error - TFDistilBertForMaskedLM, - ) - - model = TFDistilBertForMaskedLM.from_pretrained(model_name) - model.config.return_dict = True - model.config.use_cache = False - - @batch - def _infer_fn(input_ids, attention_mask): - logger.debug(f"input_ids: {input_ids.shape}") - logger.debug(f"attention_mask: {attention_mask.shape}") - device = "/GPU:0" # change this to the GPU device you want to use - with tf.device(device): - result = model(input_ids, attention_mask) - return {"logits": result.logits.numpy()} - - return _infer_fn - - -def _model_spec() -> TestModelSpec: - model_spec = TestModelSpec( - name="DistilBert", - framework=Framework.TENSORFLOW, - create_infer_fn=_create_hf_tensorflow_distilbert_base_uncased_fn, - inputs=( - Tensor(name="input_ids", dtype=np.int64, shape=(-1,)), - Tensor(name="attention_mask", dtype=np.int64, shape=(-1,)), - ), - outputs=( - Tensor( - name="logits", - dtype=np.float32, - shape=(-1, -1), - ), - ), - model_config=ModelConfig( - max_batch_size=16, - batcher=DynamicBatcher( - max_queue_delay_microseconds=5000, - ), - ), - ) - return model_spec - - -def _dataset(model_name: str, dataset_name: str, sequence_length: int, input_names: List[str], batch_size: int): - from datasets import load_dataset # pytype: disable=import-error - from transformers import AutoTokenizer, DataCollatorWithPadding, TensorType # pytype: disable=import-error - - dataset = load_dataset(dataset_name)["train"] - tokenizer = AutoTokenizer.from_pretrained(model_name) - - def _preprocess_text_dataset(examples): - return tokenizer(examples["text"], truncation=True, max_length=sequence_length) - - tokenized_dataset = dataset.map(_preprocess_text_dataset, batched=True) - dataset = tokenized_dataset.remove_columns([c for c in tokenized_dataset.column_names if c not in input_names]) - - data_collator = DataCollatorWithPadding( - tokenizer=tokenizer, - padding="max_length", - max_length=sequence_length, - return_tensors=TensorType.NUMPY, - ) - - return dataset.to_tf_dataset( - columns=dataset.column_names, - shuffle=True, - batch_size=batch_size, - collate_fn=data_collator, - ) diff --git a/stf/stf-api-alternative/pytriton/tests/functional/common/tests/tfhub_image_detection.py b/stf/stf-api-alternative/pytriton/tests/functional/common/tests/tfhub_image_detection.py deleted file mode 100644 index 2c66cce5e6d54f70c4c6ff38a4db8ddd8e3f7440..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/functional/common/tests/tfhub_image_detection.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Runs inference session over image detector model -""" -import collections -import logging -import pathlib -import tempfile -import textwrap -import time - - -def tfhub_image_detection(test_time_s: int, init_timeout_s: int, batch_size: int, verbose: bool): - from pytriton.client import ModelClient - from pytriton.triton import Triton, TritonConfig - from tests.functional.common.datasets import COCO_LABELS, TFDS_TF_FLOWERS_DATASET - from tests.functional.common.models import EFFICIENTDET_DETECTION_TF_MODEL - from tests.utils import find_free_port - - logger = logging.getLogger(__package__) - - model_spec = EFFICIENTDET_DETECTION_TF_MODEL - dataset_spec = TFDS_TF_FLOWERS_DATASET - expected_5_most_common_coco_labels = ("potted plant", "vase", "person", "bird", "tv") - - infer_fn = model_spec.create_infer_fn() - (dataset, dataset_info) = dataset_spec.create_dataset_fn(batch_size=batch_size) - classes = COCO_LABELS - - with tempfile.TemporaryDirectory() as temp_dir: - triton_log_path = pathlib.Path(temp_dir) / "triton.log" - try: - triton_config = TritonConfig( - grpc_port=find_free_port(), - http_port=find_free_port(), - metrics_port=find_free_port(), - log_verbose=int(verbose), - log_file=triton_log_path, - ) - with Triton(config=triton_config) as triton: - triton.bind( - model_name=model_spec.name, - infer_func=infer_fn, - inputs=model_spec.inputs, - outputs=model_spec.outputs, - config=model_spec.model_config, - ) - triton.run() - - classes_counter = collections.Counter() - url = f"http://localhost:{triton_config.http_port}" - with ModelClient(url, model_spec.name, init_timeout_s=init_timeout_s) as client: - should_stop_at_s = time.time() + test_time_s - dataset = dataset["train"].repeat() - for idx, (image_tensor, _) in enumerate(dataset): - image_data = image_tensor.numpy() # padded with 0 - result = client.infer_batch(image=image_data) - batch_classes = [ - result["classes"][idx, result["scores"][idx] >= 0.3] - for idx in range(result["scores"].shape[0]) - ] - classes_counter.update([classes[clazz] for classes_ in batch_classes for clazz in classes_]) - if idx > 0 and idx % 10 == 0: - time_left_s = max(should_stop_at_s - time.time(), 0.0) - logger.debug( - f"Processed {idx} batches time left: {time_left_s:0.1f}s " - f"most common labels: {classes_counter.most_common(5)} " - f"expected common labels: {expected_5_most_common_coco_labels}" - ) - - most_common = tuple(clazz for clazz, n in classes_counter.most_common(5)) - assert ( - len(set(most_common) ^ set(expected_5_most_common_coco_labels)) <= 4 - ), f"difference on {set(most_common) ^ set(expected_5_most_common_coco_labels)}" - if time_left_s <= 0: - break - - finally: - if triton_log_path.exists(): - logger.debug("-" * 64) - server_logs = triton_log_path.read_text(errors="replace") - server_logs = "--- triton logs:\n\n" + textwrap.indent(server_logs, prefix=" " * 8) - logger.debug(server_logs) diff --git a/stf/stf-api-alternative/pytriton/tests/unit/__init__.py b/stf/stf-api-alternative/pytriton/tests/unit/__init__.py deleted file mode 100644 index 
d66b825c6ee000d865c2741f5d2551e8cd5cb8b9..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/stf/stf-api-alternative/pytriton/tests/unit/assets/full_config.pbtxt b/stf/stf-api-alternative/pytriton/tests/unit/assets/full_config.pbtxt deleted file mode 100644 index 7fb1ae650b00e73ca88ba7b83a759926360736a1..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/assets/full_config.pbtxt +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -name: "simple" -platform: "tensorrt" -model_transaction_policy { - decoupled: true -} -max_batch_size: 16 -input { - name: "INPUT_1" - data_type: TYPE_FP32 - dims: -1 -} -input { - name: "INPUT_2" - data_type: TYPE_STRING - dims: -1 -} -output { - name: "OUTPUT_1" - data_type: TYPE_INT32 - dims: 1000 -} -instance_group { - count: 1 - kind: KIND_CPU -} -instance_group { - count: 2 - kind: KIND_GPU -} -dynamic_batching { - preferred_batch_size: 16 - preferred_batch_size: 32 - max_queue_delay_microseconds: 100 - preserve_ordering: true - priority_levels: 3 - default_priority_level: 1 - default_queue_policy { - timeout_action: DELAY - default_timeout_microseconds: 100 - allow_timeout_override: true - max_queue_size: 2 - } - priority_queue_policy { - key: 2 - value { - timeout_action: DELAY - default_timeout_microseconds: 100 - allow_timeout_override: true - max_queue_size: 3 - } - } -} -parameters { - key: "parameter1" - value { - string_value: "value1" - } -} -parameters { - key: "parameter2" - value { - string_value: "value2" - } -} -response_cache { - enable: true -} diff --git a/stf/stf-api-alternative/pytriton/tests/unit/assets/invalid_config.pbtxt b/stf/stf-api-alternative/pytriton/tests/unit/assets/invalid_config.pbtxt deleted file mode 100644 index b77ab1a8c35aff40b947b3b263f1cad456fb701a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/assets/invalid_config.pbtxt +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -name: "simple" -platform: "tensorflow_graphdef" -max_batch_size: 8 -input [ - { - name: "INPUT0" - data_type: TYPE_INT32 - dims: [ ] - }, - { - name: "INPUT1" - data_type: TYPE_INT32 - dims: [ 16 ] - } -] -output [ - { - name: "OUTPUT0" - data_type: TYPE_INT32 - dims: [ 16 ] - }, - { - name: "OUTPUT1" - data_type: TYPE_INT32 - dims: [ 16 ] - } -] \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/unit/assets/valid_config.pbtxt b/stf/stf-api-alternative/pytriton/tests/unit/assets/valid_config.pbtxt deleted file mode 100644 index 1b014b8323ecda17d626c3fba51de1c724284f18..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/assets/valid_config.pbtxt +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -name: "simple" -platform: "tensorflow_graphdef" -max_batch_size: 8 -input [ - { - name: "INPUT0" - data_type: TYPE_INT32 - dims: [ 16 ] - }, - { - name: "INPUT1" - data_type: TYPE_INT32 - dims: [ 16 ] - } -] -output [ - { - name: "OUTPUT0" - data_type: TYPE_INT32 - dims: [ 16 ] - }, - { - name: "OUTPUT1" - data_type: TYPE_INT32 - dims: [ 16 ] - } -] \ No newline at end of file diff --git a/stf/stf-api-alternative/pytriton/tests/unit/client_common.py b/stf/stf-api-alternative/pytriton/tests/unit/client_common.py deleted file mode 100644 index 57c0f7f90cb672effce4c7a986a25ea45d808305..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/client_common.py +++ /dev/null @@ -1,132 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import json -import logging -from typing import Union - -import numpy as np -import wrapt -from google.protobuf import json_format # pytype: disable=pyi-error -from tritonclient.grpc import InferenceServerClient as SyncGrpcInferenceServerClient -from tritonclient.grpc import model_config_pb2, service_pb2 -from tritonclient.http import InferenceServerClient as SyncHttpInferenceServerClient -from tritonclient.http.aio import InferenceServerClient as AsyncioHttpInferenceServerClient - -from pytriton.model_config import DeviceKind -from pytriton.model_config.generator import ModelConfigGenerator -from pytriton.model_config.triton_model_config import TensorSpec, TritonModelConfig - -_LOGGER = logging.getLogger(__name__) - - -ADD_SUB_WITH_BATCHING_MODEL_CONFIG = TritonModelConfig( - model_name="AddSub", - model_version=1, - max_batch_size=16, - instance_group={DeviceKind.KIND_CPU: 1}, - inputs=[ - TensorSpec(name="a", shape=(-1, 1), dtype=np.float32), - TensorSpec(name="b", shape=(-1, 1), dtype=np.float32), - ], - outputs=[ - TensorSpec(name="add", shape=(-1, 1), dtype=np.float32), - TensorSpec(name="sub", shape=(-1, 1), dtype=np.float32), - ], - backend_parameters={"shared-memory-socket": "dummy/path"}, -) - -ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG = TritonModelConfig( - model_name="AddSub", - model_version=1, - batching=False, - instance_group={DeviceKind.KIND_CPU: 1}, - inputs=[ - TensorSpec(name="a", shape=(1,), dtype=np.float32), - TensorSpec(name="b", shape=(1,), dtype=np.float32), - ], - outputs=[ - TensorSpec(name="add", shape=(1,), dtype=np.float32), - TensorSpec(name="sub", shape=(1,), dtype=np.float32), - ], - backend_parameters={"shared-memory-socket": "dummy/path"}, -) - -GRPC_LOCALHOST_URL = "grpc://localhost:8001" -HTTP_LOCALHOST_URL_NO_SCHEME = "localhost:8000" -HTTP_LOCALHOST_URL = f"http://{HTTP_LOCALHOST_URL_NO_SCHEME}" - -EXPECTED_KWARGS_DEFAULT = { - "model_name": ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - "model_version": "", - "request_id": "0", - "parameters": None, - "headers": None, -} - -_TritonClientType = Union[ - AsyncioHttpInferenceServerClient, SyncHttpInferenceServerClient, SyncGrpcInferenceServerClient -] -_HttpTritonClientType = Union[AsyncioHttpInferenceServerClient, SyncHttpInferenceServerClient] -_GrpcTritonClientType = SyncGrpcInferenceServerClient - - -def patch_client__server_up_and_ready( - mocker, base_triton_client: _TritonClientType, ready_server: bool = True, live_server: bool = True -): - mocker.patch.object(base_triton_client, base_triton_client.is_server_ready.__name__).return_value = ready_server - mocker.patch.object(base_triton_client, base_triton_client.is_server_live.__name__).return_value = live_server - - -def patch_http_client__model_up_and_ready( - mocker, - model_config: TritonModelConfig, - base_triton_client: _HttpTritonClientType, - ready: bool = True, -): - mocker.patch.object(base_triton_client, base_triton_client.is_model_ready.__name__).return_value = ready - - model_config_dict = ModelConfigGenerator(model_config).get_config() - mock_get_model_config = mocker.patch.object(base_triton_client, base_triton_client.get_model_config.__name__) - mock_get_model_config.return_value = model_config_dict - - -def patch_grpc_client__model_up_and_ready( - mocker, - model_config: TritonModelConfig, - base_triton_client: _GrpcTritonClientType, - ready: bool = True, -): - def new_is_model_ready(model_name, model_version="", headers=None, parameters=None): - return ( - ready - and model_name == model_config.model_name - and (model_version == 
"" or model_version == str(model_config.model_version)) - ) - - mocker.patch.object(base_triton_client, base_triton_client.is_model_ready.__name__, side_effect=new_is_model_ready) - - model_config_dict = ModelConfigGenerator(model_config).get_config() - model_config_protobuf = json_format.ParseDict(model_config_dict, model_config_pb2.ModelConfig()) - response = service_pb2.ModelConfigResponse(config=model_config_protobuf) - response_dict = json.loads(json_format.MessageToJson(response, preserving_proto_field_name=True)) - mock_get_model_config = mocker.patch.object(base_triton_client, base_triton_client.get_model_config.__name__) - mock_get_model_config.return_value = response_dict - - -@wrapt.decorator -def patch_server_model_addsub_no_batch_ready(wrapped, _instance, _args, kwargs): - mocker = kwargs["mocker"] - patch_client__server_up_and_ready(mocker, SyncGrpcInferenceServerClient) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG, SyncGrpcInferenceServerClient) - return wrapped(mocker) diff --git a/stf/stf-api-alternative/pytriton/tests/unit/common.py b/stf/stf-api-alternative/pytriton/tests/unit/common.py deleted file mode 100644 index 0353e678f159fd5115498e2161e019702557bcec..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/common.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import numpy as np - -from pytriton.model_config.common import DeviceKind, DynamicBatcher, QueuePolicy, TimeoutAction -from pytriton.model_config.triton_model_config import ResponseCache, TensorSpec, TritonModelConfig - -full_model_config = TritonModelConfig( - model_name="simple", - batching=True, - max_batch_size=16, - batcher=DynamicBatcher( - preferred_batch_size=[16, 32], - max_queue_delay_microseconds=100, - preserve_ordering=True, - priority_levels=3, - default_priority_level=1, - default_queue_policy=QueuePolicy( - allow_timeout_override=True, - timeout_action=TimeoutAction.DELAY, - default_timeout_microseconds=100, - max_queue_size=2, - ), - priority_queue_policy={ - 2: QueuePolicy( - allow_timeout_override=True, - timeout_action=TimeoutAction.DELAY, - default_timeout_microseconds=100, - max_queue_size=3, - ) - }, - ), - instance_group={DeviceKind.KIND_CPU: 1, DeviceKind.KIND_GPU: 2}, - decoupled=True, - backend_parameters={ - "parameter1": "value1", - "parameter2": "value2", - }, - inputs=[ - TensorSpec(name="INPUT_1", dtype=np.float32, shape=(-1,)), - TensorSpec(name="INPUT_2", dtype=np.bytes_, shape=(-1,)), - ], - outputs=[ - TensorSpec(name="OUTPUT_1", dtype=np.int32, shape=(1000,)), - ], - response_cache=ResponseCache(enable=True), -) diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_asyncio_client.py b/stf/stf-api-alternative/pytriton/tests/unit/test_asyncio_client.py deleted file mode 100644 index 4325c69a97d6a624c6fb6f2121df37f6ff65f46d..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_asyncio_client.py +++ /dev/null @@ -1,688 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import asyncio -import gc -import logging -import threading -import unittest -from unittest.mock import ANY - -import numpy as np -import pytest -from tritonclient.grpc.aio import InferenceServerClient as AsyncioGrpcInferenceServerClient -from tritonclient.http.aio import InferenceServerClient as AsyncioHttpInferenceServerClient - -from pytriton.client import AsyncioModelClient -from pytriton.client.asyncio_utils import asyncio_wait_for_model_ready -from pytriton.client.exceptions import ( - PyTritonClientInvalidUrlError, - PyTritonClientModelDoesntSupportBatchingError, - PyTritonClientTimeoutError, - PyTritonClientValueError, -) - -from .client_common import ( - ADD_SUB_WITH_BATCHING_MODEL_CONFIG, - ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, - EXPECTED_KWARGS_DEFAULT, - GRPC_LOCALHOST_URL, - HTTP_LOCALHOST_URL, - HTTP_LOCALHOST_URL_NO_SCHEME, - patch_client__server_up_and_ready, - patch_grpc_client__model_up_and_ready, - patch_http_client__model_up_and_ready, -) -from .utils import ( - extract_array_from_http_infer_input, - verify_equalness_of_dicts_with_ndarray, - wrap_to_http_infer_result, -) - -_LOGGER = logging.getLogger(__name__) - -_MAX_TEST_TIME = 10.0 - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_utils_asyncio_wait_for_model_ready_http_client_not_ready_server(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient, ready_server=False) - - triton_client = AsyncioHttpInferenceServerClient(url=HTTP_LOCALHOST_URL_NO_SCHEME, verbose=False) - try: - with pytest.raises(PyTritonClientTimeoutError): - await asyncio_wait_for_model_ready( - asyncio_client=triton_client, - model_name=ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - model_version=str(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_version), - timeout_s=1, - ) - finally: - await triton_client.close() - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_utils_asyncio_wait_for_model_ready_http_client_not_live_server(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient, live_server=False) - - triton_client = AsyncioHttpInferenceServerClient(url=HTTP_LOCALHOST_URL_NO_SCHEME, verbose=False) - try: - with pytest.raises(PyTritonClientTimeoutError): - await asyncio_wait_for_model_ready( - asyncio_client=triton_client, - model_name=ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - model_version=str(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_version), - timeout_s=1, - ) - finally: - await triton_client.close() - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_utils_asyncio_wait_for_model_ready_http_client_model_not_ready(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready( - mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient, ready=False - ) - - triton_client = AsyncioHttpInferenceServerClient(url=HTTP_LOCALHOST_URL_NO_SCHEME, verbose=False) - try: - with pytest.raises(PyTritonClientTimeoutError): - await asyncio_wait_for_model_ready( - asyncio_client=triton_client, - model_name=ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - model_version=str(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_version), - timeout_s=1, - ) - finally: - await triton_client.close() - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_utils_asyncio_wait_for_model_ready_http_client_success(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready( - mocker, 
ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient - ) - - triton_client = AsyncioHttpInferenceServerClient(url=HTTP_LOCALHOST_URL_NO_SCHEME, verbose=False) - await asyncio_wait_for_model_ready( - asyncio_client=triton_client, - model_name=ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - model_version=str(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_version), - timeout_s=1, - ) - await triton_client.close() - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_client_init_raises_error_when_invalid_url_provided(mocker): - with pytest.raises(PyTritonClientInvalidUrlError): - async with AsyncioModelClient(["localhost:8001"], "dummy") as _: # pytype: disable=wrong-arg-types - pass - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_init_raises_error_when_use_non_lazy_init_on_non_responding_server(): - with pytest.raises(PyTritonClientTimeoutError): - async with AsyncioModelClient("dummy:43299", "dummy", lazy_init=False, init_timeout_s=1) as _: - pass - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_init_obtain_expected_model_config_when_lazy_init_is_disabled(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient) - - spy_client_init = mocker.spy(AsyncioHttpInferenceServerClient, AsyncioHttpInferenceServerClient.__init__.__name__) - client = AsyncioModelClient("http://localhost:8000", ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, lazy_init=False) - await client.__aenter__() - # Exit sets some clients to none - general_client = client._general_client - infer_client = client._infer_client - await client.__aexit__(None, None, None) - assert spy_client_init.mock_calls == [ - unittest.mock.call(general_client, "localhost:8000", conn_timeout=60.0), - unittest.mock.call(infer_client, "localhost:8000", conn_timeout=60.0), - ] - assert await client.model_config == ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_model_config_raises_error_when_requested_unavailable_model(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - - mocker.patch.object( - AsyncioHttpInferenceServerClient, AsyncioHttpInferenceServerClient.is_model_ready.__name__ - ).return_value = False - - with pytest.raises(PyTritonClientTimeoutError, match="Timeout while waiting for model"): - async with AsyncioModelClient(HTTP_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1) as client: - _ = await client.model_config - - with pytest.raises(PyTritonClientTimeoutError, match="Timeout while waiting for model"): - async with AsyncioModelClient(HTTP_LOCALHOST_URL, "OtherName", "2", init_timeout_s=1) as client: - _ = await client.model_config - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_raises_error_when_requested_unavailable_model(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - mocker.patch.object( - AsyncioHttpInferenceServerClient, AsyncioHttpInferenceServerClient.is_model_ready.__name__ - ).return_value = False - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - with pytest.raises(PyTritonClientTimeoutError, match="Timeout while waiting for model"): - async with AsyncioModelClient(HTTP_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1) as client: - _ = await 
client.infer_sample(a, b) - - with pytest.raises(PyTritonClientTimeoutError, match="Timeout while waiting for model"): - async with AsyncioModelClient(HTTP_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1) as client: - _ = await client.infer_batch(a, b) - - with pytest.raises(PyTritonClientTimeoutError, match="Timeout while waiting for model"): - async with AsyncioModelClient(HTTP_LOCALHOST_URL, "OtherName", "2", init_timeout_s=1) as client: - _ = await client.infer_sample(a, b) - - with pytest.raises(PyTritonClientTimeoutError, match="Timeout while waiting for model"): - async with AsyncioModelClient(HTTP_LOCALHOST_URL, "OtherName", "2", init_timeout_s=1) as client: - _ = await client.infer_batch(a, b) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_sample_returns_expected_result_when_infer_on_model_with_batching(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - # server will return data with additional axis - server_result = {name: data[np.newaxis, ...] for name, data in expected_result.items()} - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - result = await client.infer_sample(a, b) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_DEFAULT) - expected_kwargs.update( - { - # expect to send data with additional batch axis - "inputs": {"a": a[np.newaxis, ...], "b": b[np.newaxis, ...]}, - "outputs": list(expected_result), - } - ) - assert all( - called_kwargs.get(arg_name) == arg_value - for arg_name, arg_value in expected_kwargs.items() - if arg_name not in ["inputs", "outputs"] # inputs and outputs requires manual verification - ) - assert not [key for key in called_kwargs if key not in list(expected_kwargs)] - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_sample_returns_expected_result_when_infer_on_model_with_batching_created_from_existing( - mocker, -): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - # server will return data with additional axis - server_result = {name: data[np.newaxis, ...] 
for name, data in expected_result.items()} - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - await client.infer_sample(a, b) - - async with AsyncioModelClient.from_existing_client(client) as client_from_existing: - mock_infer_from_existing = mocker.patch.object(client_from_existing._infer_client, "infer") - mock_infer_from_existing.return_value = wrap_to_http_infer_result( - ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result - ) - result = await client.infer_sample(a, b) - verify_equalness_of_dicts_with_ndarray(expected_result, result) - async with AsyncioModelClient( - HTTP_LOCALHOST_URL, - ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - model_config=await client.model_config, - ensure_model_is_ready=False, - ) as client_from_existing: - mock_infer_from_existing = mocker.patch.object(client_from_existing._infer_client, "infer") - mock_infer_from_existing.return_value = wrap_to_http_infer_result( - ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result - ) - result = await client.infer_sample(a, b) - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_sample_returns_expected_result_when_positional_args_are_used(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready( - mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient - ) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, "0", server_result) - result = await client.infer_sample(a, b) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - "inputs": {"a": a, "b": b}, - "outputs": list(expected_result), - } - ) - assert all( - called_kwargs.get(arg_name) == arg_value - for arg_name, arg_value in expected_kwargs.items() - if arg_name not in ["inputs", "outputs"] # inputs and outputs requires manual verification - ) - assert not [key for key in called_kwargs if key not in list(expected_kwargs)] - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_batch_returns_expected_result_when_positional_args_are_used(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient) - - a = np.array([[1], [1]], dtype=np.float32) - b = np.array([[1], [1]], dtype=np.float32) 
- expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - result = await client.infer_batch(a, b) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_DEFAULT) - expected_kwargs.update( - { - "inputs": {"a": a, "b": b}, - "outputs": list(expected_result), - } - ) - assert all( - called_kwargs.get(arg_name) == arg_value - for arg_name, arg_value in expected_kwargs.items() - if arg_name not in ["inputs", "outputs"] # inputs and outputs requires manual verification - ) - assert not [key for key in called_kwargs if key not in list(expected_kwargs)] - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_sample_returns_expected_result_when_named_args_are_used(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready( - mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient - ) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = {"add": a + b, "sub": a - b} - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, "0", server_result) - - inputs_dict = {"a": a, "b": b} - result = await client.infer_sample(**inputs_dict) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - "inputs": inputs_dict, - "outputs": list(expected_result), - } - ) - assert all( - called_kwargs.get(arg_name) == arg_value - for arg_name, arg_value in expected_kwargs.items() - if arg_name not in ["inputs", "outputs"] # inputs and outputs requires manual verification - ) - assert not [key for key in called_kwargs if key not in list(expected_kwargs)] - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_batch_returns_expected_result_when_named_args_are_used(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient) - - a = np.array([[1], [1]], dtype=np.float32) - b = np.array([[1], [1]], dtype=np.float32) - 
expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - - inputs_dict = {"a": a, "b": b} - result = await client.infer_batch(**inputs_dict) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_DEFAULT) - expected_kwargs.update( - { - "inputs": inputs_dict, - "outputs": list(expected_result), - } - ) - assert all( - called_kwargs.get(arg_name) == arg_value - for arg_name, arg_value in expected_kwargs.items() - if arg_name not in ["inputs", "outputs"] # inputs and outputs requires manual verification - ) - assert not [key for key in called_kwargs if key not in list(expected_kwargs)] - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_batch_raises_error_when_model_doesnt_support_batching(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready( - mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient - ) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises(PyTritonClientModelDoesntSupportBatchingError): - await client.infer_batch(a, b) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_raises_error_when_mixed_args_convention_used(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready( - mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient - ) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises( - PyTritonClientValueError, - match="Use either positional either keyword method arguments convention", - ): - await client.infer_sample(a, b=b) - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises( - PyTritonClientValueError, - match="Use either positional either keyword method arguments convention", - ): - await client.infer_batch(a, b=b) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_client_infer_raises_error_when_no_args_provided(mocker): - patch_client__server_up_and_ready(mocker, AsyncioHttpInferenceServerClient) - patch_http_client__model_up_and_ready( - mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, AsyncioHttpInferenceServerClient - ) - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises(PyTritonClientValueError, match="Provide input data"): - await client.infer_sample() - - async with AsyncioModelClient(HTTP_LOCALHOST_URL, 
ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises(PyTritonClientValueError, match="Provide input data"): - await client.infer_batch() - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -@pytest.mark.filterwarnings("error::pytest.PytestUnraisableExceptionWarning") -async def test_asynciodel_of_inference_client_does_not_raise_error(): - def _del(client): - del client._general_client - del client._infer_client - - async def _create_client_and_delete(): - client = AsyncioModelClient(HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) - await client.close() - threading.Thread(target=_del, args=(client,)).start() - - await _create_client_and_delete() - gc.collect() - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_grpc_client_infer_sample_returns_expected_result_when_infer_on_model_with_batching(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - - model_config = ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - _LOGGER.debug("Creating client") - client = AsyncioModelClient(GRPC_LOCALHOST_URL, model_config.model_name) - _LOGGER.debug("Creating client") - patch_client__server_up_and_ready(mocker, AsyncioGrpcInferenceServerClient) - patch_grpc_client__model_up_and_ready( - mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, AsyncioGrpcInferenceServerClient - ) - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", expected_result) - _LOGGER.debug("Entering client") - await client.__aenter__() - _LOGGER.debug("Entered client") - result = await client.infer_sample(a, b) - mock_infer.assert_called_with( - model_name=model_config.model_name, - model_version="", - inputs=ANY, - request_id=ANY, - headers=None, - parameters=None, - outputs=ANY, - client_timeout=60.0, - ) - _LOGGER.debug("Exiting client") - await client.__aexit__(None, None, None) - _LOGGER.debug("Exited client") - - assert result == expected_result - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_grpc_client_non_lazy_aenter_failure_triton_non_ready(mocker): - model_config = ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - _LOGGER.debug("Creating client") - client = AsyncioModelClient(GRPC_LOCALHOST_URL, model_config.model_name, init_timeout_s=0.1, lazy_init=False) - _LOGGER.debug("Before patching") - patch_client__server_up_and_ready(mocker, AsyncioGrpcInferenceServerClient, ready_server=False) - _LOGGER.debug("Entering client") - with pytest.raises(PyTritonClientTimeoutError): - await asyncio.wait_for(client.__aenter__(), 0.2) - _LOGGER.debug("Exiting client without error") - _LOGGER.debug("Exited client with error") - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_grpc_client_non_lazy_aenter_failure_triton_non_live(mocker): - model_config = ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - _LOGGER.debug("Creating client") - client = AsyncioModelClient(GRPC_LOCALHOST_URL, model_config.model_name, init_timeout_s=0.1, lazy_init=False) - _LOGGER.debug("Before patching") - patch_client__server_up_and_ready(mocker, AsyncioGrpcInferenceServerClient, live_server=False) - _LOGGER.debug("Entering client") - with pytest.raises(PyTritonClientTimeoutError): - await asyncio.wait_for(client.__aenter__(), 0.2) - _LOGGER.debug("Exiting client without error") - _LOGGER.debug("Exited client with error") - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def 
test_async_grpc_client_non_lazy_aenter_failure_model_non_ready(mocker): - model_config = ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - _LOGGER.debug("Creating client") - client = AsyncioModelClient(GRPC_LOCALHOST_URL, model_config.model_name, init_timeout_s=0.1, lazy_init=False) - _LOGGER.debug("Before patching") - patch_client__server_up_and_ready(mocker, AsyncioGrpcInferenceServerClient) - patch_grpc_client__model_up_and_ready(mocker, model_config, AsyncioGrpcInferenceServerClient, ready=False) - _LOGGER.debug("Entering client") - with pytest.raises(PyTritonClientTimeoutError): - await asyncio.wait_for(client.__aenter__(), 0.2) - _LOGGER.debug("Exiting client without error") - _LOGGER.debug("Exited client with error") - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_grpc_client_non_lazy_aenter_failure_model_state_unavailable(mocker): - model_config = ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - _LOGGER.debug("Creating client") - client = AsyncioModelClient(GRPC_LOCALHOST_URL, model_config.model_name, init_timeout_s=1, lazy_init=False) - _LOGGER.debug("Before patching") - patch_client__server_up_and_ready(mocker, AsyncioGrpcInferenceServerClient) - patch_grpc_client__model_up_and_ready(mocker, model_config, AsyncioGrpcInferenceServerClient, ready=False) - _LOGGER.debug("Entering client") - with pytest.raises(PyTritonClientTimeoutError): - await asyncio.wait_for(client.__aenter__(), 2) - _LOGGER.debug("Exiting client without error") - _LOGGER.debug("Exited client with error") - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_grpc_client_non_lazy_aenter_failure_model_incorrect_name(mocker): - model_config = ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - _LOGGER.debug("Creating client") - client = AsyncioModelClient(GRPC_LOCALHOST_URL, "DUMMY", init_timeout_s=1, lazy_init=False) - _LOGGER.debug("Before patching") - patch_client__server_up_and_ready(mocker, AsyncioGrpcInferenceServerClient) - patch_grpc_client__model_up_and_ready(mocker, model_config, AsyncioGrpcInferenceServerClient) - _LOGGER.debug("Entering client") - with pytest.raises(PyTritonClientTimeoutError): - await asyncio.wait_for(client.__aenter__(), 2) - _LOGGER.debug("Exiting client without error") - _LOGGER.debug("Exited client with error") - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_grpc_client_non_lazy_aenter_failure_model_incorrect_version(mocker): - model_config = ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - _LOGGER.debug("Creating client") - client = AsyncioModelClient( - GRPC_LOCALHOST_URL, model_config.model_name, model_version="2", init_timeout_s=1, lazy_init=False - ) - _LOGGER.debug("Before patching") - patch_client__server_up_and_ready(mocker, AsyncioGrpcInferenceServerClient) - patch_grpc_client__model_up_and_ready(mocker, model_config, AsyncioGrpcInferenceServerClient) - _LOGGER.debug("Entering client") - with pytest.raises(PyTritonClientTimeoutError): - await asyncio.wait_for(client.__aenter__(), 2) - _LOGGER.debug("Exiting client without error") - _LOGGER.debug("Exited client with error") - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_grpc_client_infer_sample_fails_on_model_with_batching(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - model_config = ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - _LOGGER.debug("Creating client") - client = AsyncioModelClient(GRPC_LOCALHOST_URL, model_config.model_name) - _LOGGER.debug("Creating client") - patch_client__server_up_and_ready(mocker, AsyncioGrpcInferenceServerClient) - 
patch_grpc_client__model_up_and_ready(mocker, model_config, AsyncioGrpcInferenceServerClient) - mock_infer = mocker.patch.object(client._infer_client, "infer") - - def _model_infer_mock(*args, **kwargs): - raise PyTritonClientValueError("Dummy exception") - - mock_infer.side_effect = _model_infer_mock - - _LOGGER.debug("Entering client") - await client.__aenter__() - _LOGGER.debug("Entered client") - - with pytest.raises(PyTritonClientValueError): - await client.infer_sample(a, b) - - _LOGGER.debug("Exiting client") - await client.__aexit__(None, None, None) - _LOGGER.debug("Exited client") - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_http_init_passes_timeout(mocker): - async with AsyncioModelClient( - "http://localhost:6669", "dummy", init_timeout_s=0.2, inference_timeout_s=0.1 - ) as client: - with pytest.raises(PyTritonClientTimeoutError): - await client.wait_for_model(timeout_s=0.2) - - -@pytest.mark.async_timeout(_MAX_TEST_TIME) -async def test_async_grpc_init_passes_timeout(mocker): - async with AsyncioModelClient( - "grpc://localhost:6669", "dummy", init_timeout_s=0.2, inference_timeout_s=0.1 - ) as client: - with pytest.raises(PyTritonClientTimeoutError): - await client.wait_for_model(timeout_s=0.2) diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_client_utils.py b/stf/stf-api-alternative/pytriton/tests/unit/test_client_utils.py deleted file mode 100644 index 41b946d7e216ad923ac9a20f11e470c366b4f47e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_client_utils.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import logging - -import pytest - -from pytriton.client.exceptions import PyTritonClientInvalidUrlError -from pytriton.client.utils import TritonUrl - -logging.basicConfig(level=logging.DEBUG) -LOGGER = logging.getLogger("test_client_utils") - - -def test_parse_triton_url_correctly(): - assert TritonUrl.from_url("localhost:8000") == TritonUrl("http", "localhost", 8000) - assert TritonUrl.from_url("localhost") == TritonUrl("http", "localhost", 8000) - assert TritonUrl.from_url("http://abc:8000") == TritonUrl("http", "abc", 8000) - assert TritonUrl.from_url("grpc://cde:8008") == TritonUrl("grpc", "cde", 8008) - assert TritonUrl.from_url("http://efg") == TritonUrl("http", "efg", 8000) - assert TritonUrl.from_url("grpc://ghi") == TritonUrl("grpc", "ghi", 8001) - - -def test_parse_triton_url_raise_exception_when_schema_is_not_supported(): - with pytest.raises(PyTritonClientInvalidUrlError): - TritonUrl.from_url("ftp://localhost:8000") - with pytest.raises(PyTritonClientInvalidUrlError): - TritonUrl.from_url("https://localhost") - - -def test_triton_url_with_schema(): - assert TritonUrl.from_url("localhost").with_scheme == "http://localhost:8000" - assert TritonUrl.from_url("grpc://some:9090").with_scheme == "grpc://some:9090" - - -def test_triton_url_without_schema(): - assert TritonUrl.from_url("localhost").without_scheme == "localhost:8000" - assert TritonUrl.from_url("grpc://some:9090").without_scheme == "some:9090" diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_communication_numpy_serialization.py b/stf/stf-api-alternative/pytriton/tests/unit/test_communication_numpy_serialization.py deleted file mode 100644 index 7aaca75c858449618666e8eaa3b2483ddfefb4dc..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_communication_numpy_serialization.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import numpy as np -import pytest - -from pytriton.proxy.communication import ( - calc_serialized_size_of_numpy_with_struct_header, - deserialize_numpy_with_struct_header, - serialize_numpy_with_struct_header, -) - -# subset of test cases from https://github.com/dask/distributed/blob/main/distributed/protocol/tests/test_numpy.py -_test_cases = [ - np.ones(5), - np.array(5), - np.random.random((5, 5)), - np.random.random((5, 5))[::2, :], - np.random.random((5, 5))[:, ::2], - np.asfortranarray(np.random.random((5, 5))), - np.asfortranarray(np.random.random((5, 5)))[::2, :], - np.asfortranarray(np.random.random((5, 5)))[:, ::2], - np.random.random(5).astype("f4"), - np.random.random(5).astype(">i8"), - np.random.random(5).astype("", - b"1", - b" 2", - b" 3", - b" 4", - b" 5", - b" 6", - b" 7", - b" 8", - b" 9", - b" 10", - b" 11", - b" 12", - b" 13", - b" 14", - b" 15", - b" 16", - b" 17", - b" 18", - b" 19", - b" 20", - b" 21", - b" 22", - b" 23", - b" 24", - b" 25", - b" 26", - b" 27", - b" 28", - b" 29", - b" 30", - b" 31", - b" 32", - b" 33", - ] - ] - ), -] - - -@pytest.mark.parametrize("x", _test_cases) -def test_serialize_and_deserialize_np_array(x): - frames = serialize_numpy_with_struct_header(x) - assert all(isinstance(frame, (bytes, memoryview)) for frame in frames) - y = deserialize_numpy_with_struct_header(frames) - assert x.shape == y.shape, (x.shape, y.shape) - assert x.dtype == y.dtype, (x.dtype, y.dtype) - if x.flags.c_contiguous or x.flags.f_contiguous: - assert x.strides == y.strides, (x.strides, y.strides) - np.testing.assert_equal(x, y) - - -@pytest.mark.parametrize("x", _test_cases) -def test_calc_serialized_size_of_numeric_np_array(x): - frames = serialize_numpy_with_struct_header(x) - size = calc_serialized_size_of_numpy_with_struct_header(x) - assert size == [memoryview(frame).nbytes for frame in frames] diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_communication_tensor_store.py b/stf/stf-api-alternative/pytriton/tests/unit/test_communication_tensor_store.py deleted file mode 100644 index 146182b34f2a98d5050d32b7ad4857bb9190a0e2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_communication_tensor_store.py +++ /dev/null @@ -1,178 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pathlib -import unittest.mock - -import numpy as np -import psutil -import pytest - -from pytriton.proxy.communication import TensorStore, _DataBlocksServer, serialize_numpy_with_struct_header - - -@pytest.fixture(scope="function") -def tensor_store(tmp_path): - data_store_socket = (tmp_path / "data_store.socket").as_posix() - tensor_store = TensorStore(data_store_socket) - try: - tensor_store.start() - yield tensor_store - finally: - tensor_store.close() - - -def test_tensor_store_create_and_close(tmp_path): - data_store_socket = (tmp_path / "data_store.socket").as_posix() - tensor_store = TensorStore(data_store_socket) # authkey will be taken from current process - tensor_store.start() - - try: - current_process = psutil.Process() - children = current_process.children(recursive=True) - assert len(children) == 2 # block store + resource tracker side processes should be created - finally: - tensor_store.close() - - children = current_process.children(recursive=True) - assert len(children) == 1 # block store side process should be closed; resource_tracker should be still running - - -def test_tensor_store_unregisters_shm_from_resource_tracker(tmp_path, mocker): - data_store_socket = (tmp_path / "data_store.socket").as_posix() - tensor_store = TensorStore(data_store_socket) # authkey will be taken from current process - tensor_store.start() # start new block store side process - - a = np.zeros((10, 10), dtype=np.float32) - b = np.array([b"foo", b"longer_bar"]) - c = np.array([b"foo", b"longer_bar"], dtype=object) - - from multiprocessing import resource_tracker - - spy_unregister = mocker.spy(resource_tracker, "unregister") - - tensor_store.put([a, b, c]) - - shm_names = { - shm._name for shm, tensor_ref in tensor_store._handled_blocks.values() # pytype: disable=attribute-error - } - - tensor_store.close() - - expected_unregister_calls = [unittest.mock.call(shm_name, "shared_memory") for shm_name in shm_names] - spy_unregister.assert_has_calls(expected_unregister_calls, any_order=True) - - -def test_tensor_store_shared_memory_unlinked_on_tensor_store_close(tmp_path): - data_store_socket = (tmp_path / "data_store.socket").as_posix() - tensor_store = TensorStore(data_store_socket) # authkey will be taken from current process - tensor_store.start() # start new block store side process - - a = np.zeros((10, 10), dtype=np.float32) - b = np.array([b"foo", b"longer_bar"]) - c = np.array([b"foo", b"longer_bar"], dtype=object) - - tensor_store.put([a, b, c]) - - shm_names = { - shm._name for shm, tensor_ref in tensor_store._handled_blocks.values() # pytype: disable=attribute-error - } - - for shm_name in shm_names: - shm_path = pathlib.Path("/dev/shm") / shm_name[1:] - assert shm_path.exists() # shared memory should be present - - tensor_store.close() - - for shm_name in shm_names: - shm_path = pathlib.Path("/dev/shm") / shm_name[1:] - assert not shm_path.exists() # shared memory should be unlinked - - -def test_tensor_store_connection_timeout(tmp_path): - data_store_socket = (tmp_path / "data_store.socket").as_posix() - tensor_store = TensorStore(data_store_socket) - - with pytest.raises(TimeoutError): - tensor_store.connect(timeout_s=0.05) - - tensor_store.start() - tensor_store.connect(timeout_s=0.05) - - -# 12bytes is 3x4bytes (num of segments and 2 segments sizes - header + np array) -_flat_array_header_size = len(serialize_numpy_with_struct_header(np.zeros((1,), dtype=np.int8))[0]) + 12 - - -@pytest.mark.parametrize( - "tensors, n_times", - ( - # different dtypes - ( - [ - 
np.zeros((10, 10), dtype=np.float32), - np.array([b"foo", b"longer_bar"]), - np.array([b"foo", b"longer_bar"], dtype=object), - ], - 1, - ), - # case when tensors are larger than minimal segment size - ( - [ - np.zeros( - (_DataBlocksServer._minimal_segment_size // np.dtype(np.float32).itemsize * 3,), dtype=np.float32 - ), - np.zeros( - (_DataBlocksServer._minimal_segment_size // np.dtype(np.float32).itemsize * 5), dtype=np.float32 - ), - np.zeros( - int(_DataBlocksServer._minimal_segment_size // np.dtype(np.float32).itemsize * 0.75), - dtype=np.float32, - ), - ], - 1, - ), - # size match exactly single segment, thus free_blocks should be empty - ( - [ - np.zeros( - ( - (_DataBlocksServer._minimal_segment_size - _flat_array_header_size) - // np.dtype(np.int8).itemsize, - ), - dtype=np.int8, - ), - ], - 2, - ), - # 2GB bytes array - ( - [ - np.array(b"a" * (2**31 - 1), dtype=bytes), - ], - 1, - ), - ), -) -def test_tensor_store_get_put_equal(tensor_store, tensors, n_times): - for _ in range(n_times): - try: - tensors_ids = tensor_store.put(tensors) - assert len(tensors) == len(tensors_ids) - for tensor, tensor_id in zip(tensors, tensors_ids): - tensor_retrieved = tensor_store.get(tensor_id) - np.testing.assert_equal(tensor, tensor_retrieved) - finally: - for tensor_id in tensors_ids: - tensor_store.release_block(tensor_id) diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_decorators.py b/stf/stf-api-alternative/pytriton/tests/unit/test_decorators.py deleted file mode 100644 index 9174393930d635fa3caf44b4a51efd6a1d137cac..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_decorators.py +++ /dev/null @@ -1,990 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Inference decorators tests.""" -import typing - -import numpy as np -import pytest -import wrapt - -from pytriton.constants import TRITON_CONTEXT_FIELD_NAME -from pytriton.decorators import ( - ConstantPadder, - InferenceRequest, - InferenceRequests, - InferenceResult, - InputNames, - ModelConfigDict, - TritonContext, - batch, - fill_optionals, - first_value, - get_model_config, - group_by_keys, - group_by_values, - pad_batch, - sample, - triton_context, -) -from pytriton.exceptions import PyTritonBadParameterError, PyTritonRuntimeError, PyTritonValidationError -from pytriton.model_config import DynamicBatcher -from pytriton.model_config.triton_model_config import TensorSpec, TritonModelConfig -from pytriton.models.model import _inject_triton_context -from pytriton.proxy.types import Request -from tests.unit.utils import verify_equalness_of_dicts_with_ndarray - -input_requests = [ - Request({"b": np.array([[1, 2]]), "a": np.array([[1, 9]])}, {}), - Request({"b": np.array([[3, 4]])}, {}), - Request({"b": np.array([[7, 5], [8, 6]]), "a": np.array([[1, 1], [1, 1]])}, {}), - Request({"b": np.array([[1, 2], [1, 2]]), "a": np.array([[2, 4], [2, 4]])}, {}), - Request({"b": np.array([[1, 2], [1, 2], [9, 9]]), "a": np.array([[1, 1], [1, 1], [1, 1]])}, {}), - Request({"a": np.array([[1, 1], [1, 1], [1, 1]])}, {}), -] - -input_requests_for_sample = [Request({"b": np.array([[7, 5], [8, 6]]), "a": np.array([[1], [1]])}, {})] - -three_request_for_batching = [ - Request({"b": np.array([[7, 5], [8, 6]]), "a": np.array([[1], [1]])}, {}), - Request({"b": np.array([[1, 2], [1, 2], [11, 12]]), "a": np.array([[1], [1], [1]])}, {}), - Request({"b": np.array([[1, 2]]), "a": np.array([[1]])}, {}), -] - - -def _prepare_and_inject_context_with_config(config, fun): - context = TritonContext() - context.model_configs[fun] = config - _inject_triton_context(context, fun) - return context - - -def test_get_model_config_key(): - def fn(): - pass - - def fn2(): - pass - - class CallableClass: - def __call__(self): - pass - - def method(self): - pass - - inst = CallableClass() - inst2 = CallableClass() - - assert ModelConfigDict._get_model_config_key(fn) == str(fn) - assert ModelConfigDict._get_model_config_key(inst) == str(inst) - assert ModelConfigDict._get_model_config_key(inst.method) == str(inst.method) - assert ModelConfigDict._get_model_config_key(inst.__call__) == str(inst) - - config_dict = ModelConfigDict() - config_dict[fn] = TritonModelConfig(model_name="fn") - config_dict[fn2] = TritonModelConfig(model_name="fn2") - assert config_dict[fn] == TritonModelConfig(model_name="fn") - assert config_dict[fn] != config_dict[fn2] - - config_dict[inst] = TritonModelConfig(model_name="inst") - config_dict[inst2] = TritonModelConfig(model_name="inst2") - assert config_dict[inst] == TritonModelConfig(model_name="inst") - assert config_dict[inst] != config_dict[inst2] - - keys = {fn, fn2, inst, inst2} - keys1 = set(config_dict.keys()) - keys2 = set(iter(config_dict)) - assert keys == keys1 - assert keys == keys2 - - -def _prepare_context_for_input(inputs, fun): - a_input = inputs[0]["a"] - b_input = inputs[0]["b"] - - a_spec = TensorSpec("a", a_input.shape, a_input.dtype) - b_spec = TensorSpec("b", b_input.shape, b_input.dtype) - - config = TritonModelConfig("a", inputs=[a_spec, b_spec], outputs=[a_spec, b_spec]) - context = TritonContext() - context.model_configs[fun] = config - - return context - - -input_batch_with_params = {"b": np.array([[1, 2], [1, 2], [9, 9]]), "a": np.array([[1], [1], [1]])} - - -def 
test_pad_batch(): - @pad_batch - def padded_fun(**inputs): - assert "a" in inputs and "b" in inputs - assert inputs["a"].shape[0] == 4 and inputs["b"].shape[0] == 4 - assert np.all(inputs["a"] == np.array([[1], [1], [1], [1]])) and np.all( - inputs["b"] == np.array([[1, 2], [1, 2], [9, 9], [9, 9]]) - ) - return inputs - - config = TritonModelConfig("MyModel", max_batch_size=8, batcher=DynamicBatcher(preferred_batch_size=[2, 4, 6])) - config.batcher.preferred_batch_size = [2, 4, 6] - _prepare_and_inject_context_with_config(config, padded_fun) - results = padded_fun(**(input_batch_with_params.copy())) - assert results["a"].shape[0] == 4 and results["b"].shape[0] == 4 - - -def test_pad_batch_no_preffered_batch_size(): - @pad_batch - def padded_fun(**inputs): - assert "a" in inputs and "b" in inputs - assert inputs["a"].shape[0] == 8 and inputs["b"].shape[0] == 8 - assert np.all(inputs["a"] == np.array([[1], [1], [1], [1], [1], [1], [1], [1]])) and np.all( - inputs["b"] == np.array([[1, 2], [1, 2], [9, 9], [9, 9], [9, 9], [9, 9], [9, 9], [9, 9]]) - ) - - return inputs - - config = TritonModelConfig("MyModel", max_batch_size=8) - _prepare_and_inject_context_with_config(config, padded_fun) - results = padded_fun(**(input_batch_with_params.copy())) - assert results["a"].shape[0] == config.max_batch_size and results["b"].shape[0] == config.max_batch_size - - -def test_sample(): - @sample - def sample_fun(**inputs): - assert isinstance(inputs, dict) and "a" in inputs and "b" in inputs - return {"a": inputs["a"] * 2, "b": inputs["b"] * 3} - - results = sample_fun(input_requests_for_sample) - - for input, output in zip(three_request_for_batching, results): - assert np.all(input["a"] * 2 == output["a"]) and np.all(input["b"] * 3 == output["b"]) - - -def test_sample_output_list(): - @sample - def sample1(**inputs): - assert isinstance(inputs, dict) and "a" in inputs and "b" in inputs - return [inputs["a"] * 2, inputs["b"] * 3] - - context = _prepare_context_for_input(input_requests_for_sample, sample1) - sample1.__triton_context__ = context - results = sample1(input_requests_for_sample) - - for input, output in zip(input_requests_for_sample, results): - assert np.all(input["a"] * 2 == output["a"]) and np.all(input["b"] * 3 == output["b"]) - - -_FIRST_VALUE_MODEL_CONFIG = TritonModelConfig(model_name="foo", inputs=[], outputs=[]) - - -@pytest.mark.parametrize( - "inputs, keys, expected", - ( - ( # extract 1st item (scalar) from 1D array - {"a": np.array([1, 2, 3]), "b": np.array([1, 1, 1])}, - ["b"], - {"a": np.array([1, 2, 3]), "b": np.int64(1)}, - ), - ( # extract 1st item (scalar) from 3D array of shape (batch_size, 1, 1) - {"a": np.array([1, 2, 3]), "b": np.array([[[1]], [[1]], [[1]]])}, - ["b"], - {"a": np.array([1, 2, 3]), "b": np.int64(1)}, - ), - ( # extract 1st item (2D) from 3D array of shape != (batch_size, 1, 1) - {"a": np.array([1, 2, 3]), "b": np.array([[[1], [2]], [[1], [2]], [[1], [2]]])}, - ["b"], - {"a": np.array([1, 2, 3]), "b": np.array([[1], [2]])}, - ), - ( # extract 1st item (scalar) from 1D array of strings (objects) - {"a": np.array([1, 2, 3]), "b": np.array(["val1", "val1"], dtype=object)}, - ["b"], - {"a": np.array([1, 2, 3]), "b": np.object_("val1")}, - ), - ( # extract 1st item (scalar) from 3D array of strings (objects) with shape (batch_size, 1, 1) - {"a": np.array([1, 2, 3]), "b": np.array([[["val1"]], [["val1"]]], dtype=object)}, - ["b"], - {"a": np.array([1, 2, 3]), "b": np.object_("val1")}, - ), - ( # do not raise error when key is missing in inputs - {"a": 
np.array([1, 2, 3]), "b": np.array([1, 1, 1])}, - ["c"], # optional name - {"a": np.array([1, 2, 3]), "b": np.array([1, 1, 1])}, - ), - ( # extract 1st item (scalar) from 1D array + do not raise error when key is missing in inputs - {"a": np.array([1, 2, 3]), "b": np.array([1, 1, 1])}, - ["b", "c"], # optional name - {"a": np.array([1, 2, 3]), "b": np.int64(1)}, - ), - ( # extract 1st item (scalar) from 1D array on 2 inputs - {"a": np.array([2, 2, 2]), "b": np.array([1, 1, 1])}, - ["a", "b"], - {"a": np.int64(2), "b": np.int64(1)}, - ), - ), -) -def test_first_value_with_single_request(mocker, inputs, keys, expected): - """Assume @batch is before decorator""" - - class PassTrough: - def __call__(self, **_inputs): - return _inputs - - passtrough = PassTrough() - spy_passtrough = mocker.spy(passtrough, "__call__") - - @first_value(*keys) - def _fn(**_inputs): - return spy_passtrough(**_inputs) - - _prepare_and_inject_context_with_config(_FIRST_VALUE_MODEL_CONFIG, _fn) - - result = _fn(**inputs) - verify_equalness_of_dicts_with_ndarray(result, expected) - - for call_args, expected_args in zip(spy_passtrough.call_args_list, [expected]): - verify_equalness_of_dicts_with_ndarray(call_args.kwargs, expected_args) - - -@pytest.mark.parametrize( - "requests, keys, expected", - ( - ( # single request - extract 1st item (scalar) from 1D array - [{"a": np.array([1, 2, 3]), "b": np.array([1, 1, 1])}], - ["b"], - [{"a": np.array([1, 2, 3]), "b": np.int64(1)}], - ), - ( # multiple requests - extract 1st item (scalar) from 3D array of shape (batch_size, 1, 1) - [ - {"a": np.array([1, 2, 3]), "b": np.array([[[1]], [[1]], [[1]]])}, - {"a": np.array([1, 2, 3]), "b": np.array([[[1]], [[1]], [[1]]])}, - ], - ["b", "optional"], - [ - {"a": np.array([1, 2, 3]), "b": np.int64(1)}, - {"a": np.array([1, 2, 3]), "b": np.int64(1)}, - ], - ), - ), -) -def test_first_value_with_requests(mocker, requests, keys, expected): - """Assume no @batch is before decorator""" - - class PassTrough: - def __call__(self, _requests): - return _requests - - passtrough = PassTrough() - spy_passtrough = mocker.spy(passtrough, "__call__") - - @first_value(*keys) - def _fn(_requests): - return spy_passtrough(_requests) - - _prepare_and_inject_context_with_config(_FIRST_VALUE_MODEL_CONFIG, _fn) - - results = _fn(requests) - assert len(results) == len(expected) - for result, expected_request in zip(results, expected): - verify_equalness_of_dicts_with_ndarray(result, expected_request) - - for call_args, expected_requests in zip(spy_passtrough.call_args_list, [expected]): - called_requests, *_ = call_args.args - for called_request, expected_request in zip(called_requests, expected_requests): - verify_equalness_of_dicts_with_ndarray(called_request, expected_request) - - -def test_first_value_raises_on_special_key(): - with pytest.raises(PyTritonBadParameterError, match="not allowed as keys for @first_value wrapper."): - - @first_value("__triton_context__") - def _fn(**inputs): - pass - - -def test_first_value_raises_on_not_equal_values(): - @first_value("a") - def _fn(**inputs): - pass - - _prepare_and_inject_context_with_config(_FIRST_VALUE_MODEL_CONFIG, _fn) - - with pytest.raises(PyTritonRuntimeError, match="The values on the .* input are not equal"): - _fn(a=np.array([[1], [2], [2]])) - - # test disabling strict check - @first_value("a", strict=False) - def _fn(**inputs): - pass - - _prepare_and_inject_context_with_config(_FIRST_VALUE_MODEL_CONFIG, _fn) - _fn(a=np.array([[1], [2], [2]])) - - -def 
test_first_value_raises_on_models_not_supporting_batching(): - @first_value("a") - def _fn(**inputs): - pass - - _prepare_and_inject_context_with_config( - TritonModelConfig(model_name="foo", inputs=[], outputs=[], batching=False), _fn - ) - - with pytest.raises( - PyTritonRuntimeError, match="The @first_value decorator can only be used with models that support batching." - ): - _fn(a=np.array([[1], [2], [2]])) - - -def test_group_by_keys(): - @group_by_keys - def groupby_keys_fun(inputs): - for req1, req2 in zip(inputs, inputs[1:]): - assert req1.keys() == req2.keys() - k = len(inputs[0].keys()) - results = [{key: inp[key] * k for key in inp} for inp in inputs] - return results - - results = groupby_keys_fun(input_requests) - for req, res in zip(input_requests, results): - assert req.keys() == res.keys() - for key in req: - assert np.all(req[key] * len(req.keys()) == res[key]) - - -class GroupByValuesTestCase(typing.NamedTuple): - inference_request: InferenceRequest - keys: InputNames - expected: typing.Optional[InferenceRequests] = None - expected_result: typing.Optional[InferenceResult] = None - - -_idx1 = "1" -_idx2 = "2" - - -@pytest.mark.parametrize( - "inference_request, keys, expected, expected_result", - ( - GroupByValuesTestCase( - inference_request={ - "a": np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]), - "b": np.array([[7, 5], [8, 6], [1, 2], [1, 2], [11, 12], [1, 2], [5, 6], [7, 2], [4, 2], [1, 122]]), - }, - keys=["a"], - expected=( - { - "a": np.array([[1], [1], [1], [1], [1], [1]]), - "b": np.array([[7, 5], [8, 6], [1, 2], [1, 2], [11, 12], [1, 2]]), - }, - {"a": np.array([[2], [2], [2], [2]]), "b": np.array([[5, 6], [7, 2], [4, 2], [1, 122]])}, - ), - ), - # using concatenation with _idx variables to avoid string interning - # https://stackabuse.com/guide-to-string-interning-in-python/ - GroupByValuesTestCase( # string values - inference_request={ - "a": np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]), - "s": np.array( - [ - "t" + _idx1, - "t" + _idx2, - "t" + _idx1, - "t" + _idx1, - "t" + _idx2, - "t" + _idx2, - "t" + _idx1, - "t" + _idx1, - "t" + _idx1, - "t" + _idx1, - ], - dtype=object, - ), - }, - keys=["s"], - expected=( - { - "a": np.array([[1], [1], [1], [2], [2], [2], [2]]), - "s": np.array( - ["t" + _idx1, "t" + _idx1, "t" + _idx1, "t" + _idx1, "t" + _idx1, "t" + _idx1, "t" + _idx1], - dtype=object, - ), - }, - {"a": np.array([[1], [1], [1]]), "s": np.array(["t" + _idx2, "t" + _idx2, "t" + _idx2], dtype=object)}, - ), - ), - # using concatenation with _idx variables to avoid string interning - # https://stackabuse.com/guide-to-string-interning-in-python/ - GroupByValuesTestCase( # 2d array of string values - inference_request={ - "a": np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]), - "s": np.array( - [ - ["t" + _idx1, "t" + _idx1], - ["t" + _idx2, "t" + _idx2], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx2, "t" + _idx2], - ["t" + _idx2, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ], - dtype=object, - ), - }, - keys=["s"], - expected=( - { - "a": np.array([[1], [1], [1], [2], [2], [2], [2]]), - "s": np.array( - [ - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ["t" + _idx1, "t" + _idx1], - ], - dtype=object, - ), - }, - {"a": np.array([[1]]), "s": np.array([["t" + _idx2, 
"t" + _idx1]], dtype=object)}, - { - "a": np.array([[1], [1]]), - "s": np.array([["t" + _idx2, "t" + _idx2], ["t" + _idx2, "t" + _idx2]], dtype=object), - }, - ), - ), - GroupByValuesTestCase( # group by 2 keys - inference_request={ - "a": np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]), - "s": np.array( - [ - ["t1", "t1"], - ["t2", "t2"], - ["t1", "t1"], - ["t1", "t1"], - ["t2", "t2"], - ["t2", "t1"], - ["t1", "t1"], - ["t1", "t1"], - ["t1", "t1"], - ["t1", "t1"], - ], - dtype=object, - ), - }, - keys=["a", "s"], - expected=( - { - "a": np.array([[1], [1], [1]]), - "s": np.array([["t1", "t1"], ["t1", "t1"], ["t1", "t1"]], dtype=object), - }, - {"a": np.array([[1]]), "s": np.array([["t2", "t1"]], dtype=object)}, - {"a": np.array([[1], [1]]), "s": np.array([["t2", "t2"], ["t2", "t2"]], dtype=object)}, - { - "a": np.array([[2], [2], [2], [2]]), - "s": np.array([["t1", "t1"], ["t1", "t1"], ["t1", "t1"], ["t1", "t1"]], dtype=object), - }, - ), - ), - ), -) -def test_group_by_values(mocker, inference_request, keys, expected, expected_result): - class PassTrough: - def __call__(self, **inputs): - return inputs - - passtrough = PassTrough() - spy_passtrough = mocker.spy(passtrough, "__call__") - - @group_by_values(*keys) - def _fn(**inputs): - return spy_passtrough(**inputs) - - result = _fn(**inference_request) - verify_equalness_of_dicts_with_ndarray(result, inference_request) - - for call_args, expected_request in zip(spy_passtrough.call_args_list, expected): - called_request = call_args.kwargs - verify_equalness_of_dicts_with_ndarray(called_request, expected_request) - - -def _expected_test_group_by_values_with_dynamic_axes_on_output(): - expected = np.zeros((10, 16, 4, 4), dtype="int") - expected[:6, :16, :4, :2] = 1 - expected[6:, :3, :2, :4] = 1 - return expected - - -@pytest.mark.parametrize( - "inference_request, keys, expected, expected_result", - ( - GroupByValuesTestCase( # output axes: a: (1,), output: (-1,) - inference_request={ - "a": np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]), - "b": np.array([[7, 5], [8, 6], [1, 2], [1, 2], [11, 12], [1, 2], [5, 6], [7, 2], [4, 2], [1, 122]]), - "output_length": np.array([[8], [8], [16], [16], [4], [8], [3], [3], [3], [3]]), - }, - keys=["a"], - expected_result={ - "a": np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]), - "output": np.block( - [ # 16 is the max output_length - [np.ones((6, 16), dtype="int")], - [np.ones((4, 3), dtype="int"), np.zeros((4, 13), dtype="int")], - ] - ), - }, - ), - GroupByValuesTestCase( # output axes: a: (1,), output: (-1, -1, -1) - inference_request={ - "a": np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]), - "b": np.array([[7, 5], [8, 6], [1, 2], [1, 2], [11, 12], [1, 2], [5, 6], [7, 2], [4, 2], [1, 122]]), - "output_length": np.array( - [ - [8, 2, 1], - [8, 2, 1], - [16, 4, 2], - [16, 4, 2], - [4, 2, 2], - [8, 2, 2], - [3, 1, 4], - [3, 1, 4], - [3, 2, 2], - [3, 2, 2], - ] - ), - }, - keys=["a"], - expected_result={ - "a": np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]), - "output": _expected_test_group_by_values_with_dynamic_axes_on_output(), - }, - ), - ), -) -def test_group_by_values_with_dynamic_axes_on_output(mocker, inference_request, keys, expected, expected_result): - @group_by_values(*keys, pad_fn=ConstantPadder(0)) - def _fn(**inputs): - return { - "a": inputs["a"], - "output": np.ones((len(inputs["a"]), *np.max(inputs["output_length"], axis=0).tolist()), dtype="int"), - } - - result = _fn(**inference_request) - 
verify_equalness_of_dicts_with_ndarray(result, expected_result) - - -def test_group_by_values_with_dynamic_axes_of_bytes_on_output(): - @group_by_values("a", pad_fn=ConstantPadder(0)) - def _fn(**inputs): - if inputs["a"][0][0] == 1: - sequences = np.array( - [ - [b"foo", b"barxxx", b""], - [b"bar1", b"Loriem ipsum", b"foo"], - [b"foo", b"barxxx", b""], - [b"bar1", b"Loriem ipsum", b"foo"], - [b"foo", b"barxxx", b""], - [b"bar1", b"Loriem ipsum", b"foo"], - ] - ) - else: - sequences = np.array( - [ - [b"foo", b"bar", b"", b""], - [b"1", b"22", b"3", b"4444"], - [b"foo", b"bar", b"", b""], - [b"1", b"22", b"3", b"4444"], - ] - ) - - return {"a": inputs["a"], "output": sequences} - - a = np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]) - inference_request = {"a": a} - expected_result = { - "a": a, - "output": np.array( - [ - [b"foo", b"barxxx", b"", b""], - [b"bar1", b"Loriem ipsum", b"foo", b""], - [b"foo", b"barxxx", b"", b""], - [b"bar1", b"Loriem ipsum", b"foo", b""], - [b"foo", b"barxxx", b"", b""], - [b"bar1", b"Loriem ipsum", b"foo", b""], - [b"foo", b"bar", b"", b""], - [b"1", b"22", b"3", b"4444"], - [b"foo", b"bar", b"", b""], - [b"1", b"22", b"3", b"4444"], - ] - ), - } - - result = _fn(**inference_request) - verify_equalness_of_dicts_with_ndarray(result, expected_result) - - -def test_group_by_values_with_dynamic_axes_of_unicode_on_output(): - @group_by_values("a", pad_fn=ConstantPadder(0)) - def _fn(**inputs): - if inputs["a"][0][0] == 1: - sequences = np.array( - [ - ["foo", "barxxx", ""], - ["bar1", "Loriem ipsum", "foo"], - ["foo", "barxxx", ""], - ["bar1", "Loriem ipsum", "foo"], - ["foo", "barxxx", ""], - ["bar1", "Loriem ipsum", "foo"], - ] - ) - else: - sequences = np.array( - [ - ["foo", "bar", "", ""], - ["1", "22", "3", "4444"], - ["foo", "bar", "", ""], - ["1", "22", "3", "4444"], - ] - ) - - return {"a": inputs["a"], "output": sequences} - - a = np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]) - inference_request = {"a": a} - expected_result = { - "a": a, - "output": np.array( - [ - ["foo", "barxxx", "", ""], - ["bar1", "Loriem ipsum", "foo", ""], - ["foo", "barxxx", "", ""], - ["bar1", "Loriem ipsum", "foo", ""], - ["foo", "barxxx", "", ""], - ["bar1", "Loriem ipsum", "foo", ""], - ["foo", "bar", "", ""], - ["1", "22", "3", "4444"], - ["foo", "bar", "", ""], - ["1", "22", "3", "4444"], - ] - ), - } - - result = _fn(**inference_request) - verify_equalness_of_dicts_with_ndarray(result, expected_result) - - -def test_group_by_values_with_dynamic_axes_of_bytes_as_objects_on_output(): - @group_by_values("a", pad_fn=ConstantPadder(0)) - def _fn(**inputs): - if inputs["a"][0][0] == 1: - sequences = np.array( - [ - [b"foo", b"barxxx", b""], - [b"bar1", b"Loriem ipsum", b"foo"], - [b"foo", b"barxxx", b""], - [b"bar1", b"Loriem ipsum", b"foo"], - [b"foo", b"barxxx", b""], - [b"bar1", b"Loriem ipsum", b"foo"], - ], - dtype=object, - ) - else: - sequences = np.array( - [ - [b"foo", b"bar", b"", b""], - [b"1", b"22", b"3", b"4444"], - [b"foo", b"bar", b"", b""], - [b"1", b"22", b"3", b"4444"], - ], - dtype=object, - ) - - return {"a": inputs["a"], "output": sequences} - - a = np.array([[1], [1], [1], [1], [1], [1], [2], [2], [2], [2]]) - inference_request = {"a": a} - expected_result = { - "a": a, - "output": np.array( - [ - [b"foo", b"barxxx", b"", b""], - [b"bar1", b"Loriem ipsum", b"foo", b""], - [b"foo", b"barxxx", b"", b""], - [b"bar1", b"Loriem ipsum", b"foo", b""], - [b"foo", b"barxxx", b"", b""], - [b"bar1", b"Loriem ipsum", b"foo", b""], - 
[b"foo", b"bar", b"", b""], - [b"1", b"22", b"3", b"4444"], - [b"foo", b"bar", b"", b""], - [b"1", b"22", b"3", b"4444"], - ], - dtype=object, - ), - } - - result = _fn(**inference_request) - verify_equalness_of_dicts_with_ndarray(result, expected_result) - - -def test_group_by_values_raise_error_if_placed_before_batch(): - with pytest.raises( - PyTritonRuntimeError, match="The @group_by_values decorator must be used after the @batch decorator." - ): - - @group_by_values("a") - @batch - def _fn(**_requests): - return _requests - - _fn([{"a": np.zeros((1,))}, {"a": np.zeros((1,))}]) - - -def test_fill_optionals_in_instance_callable(): - class MyModel: - @fill_optionals(a=np.array([-1, -2]), b=np.array([-5, -6])) - def __call__(self, inputs): - for req in inputs: - assert "a" in req and "b" in req - assert req["a"].shape[0] == req["b"].shape[0] - assert np.all(inputs[1]["a"] == np.array([[-1, -2], [-1, -2]])) - assert np.all(inputs[-1]["b"] == np.array([[-5, -6], [-5, -6], [-5, -6]])) - return inputs - - model = MyModel() - - _prepare_and_inject_context_with_config( - TritonModelConfig( - model_name="foo", - inputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - outputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - ), - model.__call__, - ) - - results = model(input_requests) - assert len(results) == len(input_requests) - - -def test_fill_optionals(): - @fill_optionals(a=np.array([-1, -2]), b=np.array([-5, -6])) - def fill_fun(inputs): - for req in inputs: - assert "a" in req and "b" in req - assert req["a"].shape[0] == req["b"].shape[0] - assert np.all(inputs[1]["a"] == np.array([[-1, -2], [-1, -2]])) - assert np.all(inputs[-1]["b"] == np.array([[-5, -6], [-5, -6], [-5, -6]])) - return inputs - - _prepare_and_inject_context_with_config( - TritonModelConfig( - model_name="foo", - inputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - outputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - ), - fill_fun, - ) - - results = fill_fun(input_requests) - assert len(results) == len(input_requests) - - -def test_fill_optionals_for_not_batching_models(): - @fill_optionals(a=np.array([-1, -2]), b=np.array([-5, -6])) - def infer_fn(inputs): - return inputs - - _prepare_and_inject_context_with_config( - TritonModelConfig( - model_name="foo", - batching=False, - inputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - outputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - ), - infer_fn, - ) - - inputs = [ - Request({"a": np.array([1, 9]), "b": np.array([1, 2])}, {}), - Request({"b": np.array([3, 4])}, {}), - ] - expected_results = [ - {"a": np.array([1, 9]), "b": np.array([1, 2])}, - {"a": np.array([-1, -2]), "b": np.array([3, 4])}, - ] - - results = infer_fn(inputs) - for result, expected_result in zip(results, expected_results): - assert not set(result) ^ set(expected_result) - for input_name in result: - np.testing.assert_array_equal(result[input_name], expected_result[input_name]) - - -def test_fill_optionals_raise_on_non_numpy_defaults(): - @fill_optionals(a=1, b=np.array([-5, -6])) - def infer_fn(inputs): - return inputs - - _prepare_and_inject_context_with_config( - TritonModelConfig( - model_name="foo", - inputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - 
outputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - ), - infer_fn, - ) - - with pytest.raises(PyTritonBadParameterError, match="Could not use a=.* they are not NumPy arrays"): - infer_fn(input_requests) - - -def test_fill_optionals_raise_error_on_dtype_mismatch(): - @fill_optionals(a=np.array([-1, -2]), b=np.array([-5, -6])) - def infer_fn(inputs): - return inputs - - _prepare_and_inject_context_with_config( - TritonModelConfig( - model_name="foo", - inputs=[TensorSpec("a", shape=(2,), dtype=np.int32), TensorSpec("b", shape=(2,), dtype=np.int64)], - outputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - ), - infer_fn, - ) - - with pytest.raises( - PyTritonBadParameterError, match="Could not use a: dtype=.* have different than input signature dtypes" - ): - infer_fn(input_requests) - - -def test_fill_optionals_raise_error_on_shape_mismatch(): - @fill_optionals(a=np.array([[-1, -2]]), b=np.array([-5, -6])) - def infer_fn(inputs): - return inputs - - _prepare_and_inject_context_with_config( - TritonModelConfig( - model_name="foo", - inputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - outputs=[TensorSpec("a", shape=(2,), dtype=np.int64), TensorSpec("b", shape=(2,), dtype=np.int64)], - ), - infer_fn, - ) - - with pytest.raises( - PyTritonBadParameterError, match="Could not use a: shape=.*have different than input signature shapes" - ): - infer_fn(input_requests) - - -def test_triton_context(): - @triton_context - def fun_with_tr_context(inputs, **kwargs): - assert kwargs.get("triton_context") == "Context" - assert "a" in inputs - assert np.all(inputs["a"] == np.array([1, 2])) - return inputs - - fun_with_tr_context.__triton_context__ = "Context" - res = fun_with_tr_context({"a": np.array([1, 2])}) - assert "a" in res - assert np.all(res["a"] == np.array([1, 2])) - - def fun_without_tr_context(inputs, **kwargs): - assert "triton_context" not in kwargs - assert "a" in inputs - assert np.all(inputs["a"] == np.array([1, 2])) - return inputs - - fun_without_tr_context.__triton_context__ = "Context" - res = fun_without_tr_context({"a": np.array([1, 2])}) - assert "a" in res - assert np.all(res["a"] == np.array([1, 2])) - - -def test_triton_context_not_set(): - @triton_context - def fun_without_tr_context(inputs, **kwargs): - pytest.fail("Should not get here. 
Should raise error before.") - - try: - _ = fun_without_tr_context({"a": np.array([1, 2])}) - pytest.fail("Error should be raised") - except PyTritonValidationError as ex: - assert "Wrapped function or object must bound with triton to get __triton_context__" in ex.message - except Exception as ex: - raise RuntimeError("PyTritonValidationError should be raised") from ex - pytest.fail("PyTritonValidationError should be raised") - - -def test_inject_and_acquire_triton_context(): - context = TritonContext() - - @triton_context - class A: - def __init__(self, **kwargs): - assert kwargs.get(TRITON_CONTEXT_FIELD_NAME) == context - - @classmethod - def __call__(cls, *args, **kwargs): - assert kwargs.get(TRITON_CONTEXT_FIELD_NAME) == context - - class B: - @triton_context - def fun(self, **kwargs): - assert kwargs.get(TRITON_CONTEXT_FIELD_NAME) == context - - class C: - @triton_context - def __call__(self, *args, **kwargs): - assert kwargs.get(TRITON_CONTEXT_FIELD_NAME) == context - - @triton_context - def fun(**kwargs): - assert kwargs.get(TRITON_CONTEXT_FIELD_NAME) == context - - caller1 = _inject_triton_context(context, A) - caller2 = _inject_triton_context(context, B().fun) - caller3 = _inject_triton_context(context, C()) - caller4 = _inject_triton_context(context, fun) - - caller1() - caller2() - caller3() - caller4() - - -def test_get_triton_context_with_decorators_stack(): - """It should be possible to obtain TritonContext from any decorator in the wrappers stack""" - - dummy_config = TritonModelConfig("foo") - - @wrapt.decorator - def my_decorator(wrapped, instance, args, kwargs): - _config = get_model_config(wrapped, instance) - assert _config == dummy_config - - @my_decorator - @batch - def infer_fn(**kwargs): - return kwargs - - _prepare_and_inject_context_with_config(dummy_config, infer_fn) - infer_fn() diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_decorators_batch.py b/stf/stf-api-alternative/pytriton/tests/unit/test_decorators_batch.py deleted file mode 100644 index 73679347a99bdd1918cb82ee3076e86418a46a86..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_decorators_batch.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-"""Inference decorators tests.""" -import inspect - -import numpy as np -import pytest - -from pytriton.decorators import TritonContext, batch -from pytriton.model_config.triton_model_config import TensorSpec, TritonModelConfig -from pytriton.models.model import _inject_triton_context -from pytriton.proxy.types import Request - -_sample_requests = [ - Request({"b": np.array([[7, 5], [8, 6]]), "a": np.array([[1], [1]])}, {}), - Request({"b": np.array([[1, 2], [1, 2], [11, 12]]), "a": np.array([[1], [1], [1]])}, {}), - Request({"b": np.array([[1, 2]]), "a": np.array([[1]])}, {}), -] - -_sample_requests_multiplied_by_2 = [ - {"b": np.array([[14, 10], [16, 12]]), "a": np.array([[2], [2]])}, - {"b": np.array([[2, 4], [2, 4], [22, 24]]), "a": np.array([[2], [2], [2]])}, - {"b": np.array([[2, 4]]), "a": np.array([[2]])}, -] - - -@batch -def batched_multiply_2(**_inputs): - assert isinstance(_inputs, dict) - return {key: value * 2 for key, value in _inputs.items()} - - -@batch -def batched_multiply_2_gen(**_inputs): - assert isinstance(_inputs, dict) - yield {key: value * 2 for key, value in _inputs.items()} - - -@pytest.mark.parametrize( - "inputs, infer_fn, expected", - ( - ( - _sample_requests, - batched_multiply_2, - _sample_requests_multiplied_by_2, - ), - ( - _sample_requests, - batched_multiply_2_gen, - _sample_requests_multiplied_by_2, - ), - ), -) -def test_batch(inputs, infer_fn, expected): - results = infer_fn(inputs) - - if inspect.isgenerator(results): - # each item generated by batch function is a list of results for each request - # thus we need to flatten the list of lists - results = [item for partial_results in results for item in partial_results] - - assert len(expected) == len(results) # ensure same number of results as expected - for expected_result, result in zip(expected, results): - assert list(expected_result) == list(result) # ensure same keys - assert all(np.equal(expected_result[key], result[key]).all() for key in expected_result.keys()) - - -@batch -def batched_multiply_2_returning_list(**_inputs): - assert isinstance(_inputs, dict) - return [value * 2 for value in _inputs.values()] - - -@batch -def batched_multiply_2_returning_list_gen(**_inputs): - assert isinstance(_inputs, dict) - yield [value * 2 for value in _inputs.values()] - - -def _prepare_and_inject_context_with_config(config, fn): - context = TritonContext() - context.model_configs[fn] = config - _inject_triton_context(context, fn) - return context - - -@pytest.mark.parametrize( - "inputs, infer_fn, expected", - ( - ( - _sample_requests, - batched_multiply_2_returning_list, - _sample_requests_multiplied_by_2, - ), - ( - _sample_requests, - batched_multiply_2_returning_list_gen, - _sample_requests_multiplied_by_2, - ), - ), -) -def test_batch_with_context(inputs, infer_fn, expected): - # list outputs require the context to be injected - _prepare_and_inject_context_with_config( - config=TritonModelConfig( - "my_model", - inputs=[TensorSpec(name, value.shape, value.dtype) for name, value in inputs[0].items()], - outputs=[TensorSpec(name, value.shape, value.dtype) for name, value in expected[0].items()], - ), - fn=infer_fn, - ) - - test_batch(inputs, infer_fn, expected) - - -def test_batch_raises_on_incorrect_batch_size_of_outputs(): - @batch - def _infer_fn(**_inputs): - return {key: value[:1] * 2 for key, value in _inputs.items()} - - with pytest.raises(ValueError, match=r"Received output tensors with different batch sizes"): - _infer_fn(_sample_requests) diff --git 
a/stf/stf-api-alternative/pytriton/tests/unit/test_futures_client.py b/stf/stf-api-alternative/pytriton/tests/unit/test_futures_client.py deleted file mode 100644 index 355883a55eab1e8b6dac947f59fa618f12670d6a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_futures_client.py +++ /dev/null @@ -1,520 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import logging -import time -from threading import Event, Thread - -import gevent -import numpy as np -import pytest -from gevent.hub import Hub as GeventHub - -from pytriton.client import FuturesModelClient, ModelClient -from pytriton.client.exceptions import ( - PyTritonClientClosedError, - PyTritonClientInvalidUrlError, - PyTritonClientQueueFullError, - PyTritonClientTimeoutError, - PyTritonClientValueError, -) -from pytriton.model_config import DeviceKind -from pytriton.model_config.triton_model_config import TensorSpec, TritonModelConfig - -from .client_common import ( - ADD_SUB_WITH_BATCHING_MODEL_CONFIG, - GRPC_LOCALHOST_URL, - HTTP_LOCALHOST_URL, - patch_server_model_addsub_no_batch_ready, -) -from .utils import ( - patch_grpc_client__model_up_and_ready, - patch_grpc_client__server_up_and_ready, - patch_http_client__model_up_and_ready, - patch_http_client__server_up_and_ready, -) - -logging.basicConfig(level=logging.DEBUG) -LOGGER = logging.getLogger("test_sync_client") - - -ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG = TritonModelConfig( - model_name="AddSub", - model_version=1, - batching=False, - instance_group={DeviceKind.KIND_CPU: 1}, - inputs=[ - TensorSpec(name="a", shape=(1,), dtype=np.float32), - TensorSpec(name="b", shape=(1,), dtype=np.float32), - ], - outputs=[ - TensorSpec(name="add", shape=(1,), dtype=np.float32), - TensorSpec(name="sub", shape=(1,), dtype=np.float32), - ], - backend_parameters={"shared-memory-socket": "dummy/path"}, -) - - -logging.basicConfig(level=logging.DEBUG) -LOGGER = logging.getLogger("test_sync_client") - - -def test_wait_for_model_raise_error_when_invalid_url_provided(): - with pytest.raises(PyTritonClientInvalidUrlError, match="Invalid url"): - with FuturesModelClient(["localhost:8001"], "dummy") as client: # pytype: disable=wrong-arg-types - client.wait_for_model(timeout_s=0.1).result() - - -@patch_server_model_addsub_no_batch_ready -def test_wait_for_model_passes_timeout_to_client(mocker): - spy_client_close = mocker.spy(ModelClient, ModelClient.close.__name__) - mock_client_wait_for_model = mocker.patch.object(ModelClient, ModelClient.wait_for_model.__name__) - mock_client_wait_for_model.return_value = True - spy_thread_start = mocker.spy(Thread, Thread.start.__name__) - spy_thread_join = mocker.spy(Thread, Thread.join.__name__) - spy_get_hub = mocker.spy(gevent, gevent.get_hub.__name__) - spy_hub_destroy = mocker.spy(GeventHub, GeventHub.destroy.__name__) - with FuturesModelClient( - GRPC_LOCALHOST_URL, - ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - 
str(ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_version), - max_workers=1, - ) as client: - future = client.wait_for_model(15) - result = future.result() - assert result is True - spy_client_close.assert_called_once() - mock_client_wait_for_model.assert_called_with(15) - spy_thread_start.assert_called_once() - spy_thread_join.assert_called_once() - spy_get_hub.assert_called_once() - spy_hub_destroy.assert_called_once() - - -@patch_server_model_addsub_no_batch_ready -def test_infer_raises_error_when_mixed_args_convention_used(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - init_t_timeout_s = 15.0 - - with FuturesModelClient( - GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, init_timeout_s=init_t_timeout_s - ) as client: - with pytest.raises( - PyTritonClientValueError, - match="Use either positional either keyword method arguments convention", - ): - client.infer_sample(a, b=b).result() - - with FuturesModelClient(GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises( - PyTritonClientValueError, - match="Use either positional either keyword method arguments convention", - ): - client.infer_batch(a, b=b).result() - - -@patch_server_model_addsub_no_batch_ready -def test_infer_sample_returns_values_creates_client(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - c = np.array([3], dtype=np.float32) - - init_t_timeout_s = 15.0 - - mock_client_wait_for_model = mocker.patch.object(ModelClient, ModelClient._wait_and_init_model_config.__name__) - mock_client_infer_sample = mocker.patch.object(ModelClient, ModelClient.infer_sample.__name__) - - mock_client_infer_sample.return_value = c - with FuturesModelClient( - GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, init_timeout_s=init_t_timeout_s - ) as client: - result = client.infer_sample(a=a, b=b).result() - mock_client_wait_for_model.assert_called_once_with(init_t_timeout_s) - mock_client_infer_sample.assert_called_once_with(parameters=None, headers=None, a=a, b=b) - # Check the Python version and use different assertions for cancel_futures - assert result == c - - -@patch_server_model_addsub_no_batch_ready -def test_infer_sample_returns_values_creates_client_close_wait(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - c = np.array([3], dtype=np.float32) - - mock_client_infer_sample = mocker.patch.object(ModelClient, ModelClient.infer_sample.__name__) - - # Prevent exit from closing the client - mocker.patch.object(FuturesModelClient, FuturesModelClient.__exit__.__name__) - - mock_client_infer_sample.return_value = c - client = FuturesModelClient(GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) - result = client.infer_sample(a, b).result() - client.close(wait=True) - mock_client_infer_sample.assert_called_once_with(a, b, parameters=None, headers=None) - assert result == c - - -@patch_server_model_addsub_no_batch_ready -def test_infer_batch_returns_values_creates_client(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - c = np.array([3], dtype=np.float32) - - init_t_timeout_s = 15.0 - - mock_client_infer_batch = mocker.patch.object(ModelClient, ModelClient.infer_batch.__name__) - mock_client_infer_batch.return_value = c - with FuturesModelClient( - GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, init_timeout_s=init_t_timeout_s - ) as client: - result = client.infer_batch(a=a, 
b=b).result() - model_config = client.model_config().result() - mock_client_infer_batch.assert_called_once_with(parameters=None, headers=None, a=a, b=b) - assert model_config.model_name == ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name - assert result == c - - -@patch_server_model_addsub_no_batch_ready -def test_infer_sample_list_passed_arguments_returns_arguments(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - ret = np.array([3], dtype=np.float32) - - patch_client_infer_sample = mocker.patch.object(ModelClient, ModelClient.infer_sample.__name__) - patch_client_infer_sample.return_value = ret - with FuturesModelClient(GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - return_value = client.infer_sample(a, b).result() - assert return_value == ret - patch_client_infer_sample.assert_called_once_with(a, b, parameters=None, headers=None) - - -@patch_server_model_addsub_no_batch_ready -def test_infer_sample_dict_passed_arguments_returns_arguments(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - ret = np.array([3], dtype=np.float32) - - patch_client_infer_sample = mocker.patch.object(ModelClient, ModelClient.infer_sample.__name__) - patch_client_infer_sample.return_value = ret - with FuturesModelClient(GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - return_value = client.infer_sample(a=a, b=b).result() - assert return_value == ret - patch_client_infer_sample.assert_called_once_with(a=a, b=b, parameters=None, headers=None) - - -@patch_server_model_addsub_no_batch_ready -def test_infer_batch_list_passed_arguments_returns_arguments(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - ret = np.array([3], dtype=np.float32) - - patch_client_infer_batch = mocker.patch.object(ModelClient, ModelClient.infer_batch.__name__) - patch_client_infer_batch.return_value = ret - with FuturesModelClient(GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - return_value = client.infer_batch(a, b).result() - assert return_value == ret - patch_client_infer_batch.assert_called_once_with(a, b, parameters=None, headers=None) - - -@patch_server_model_addsub_no_batch_ready -def test_infer_batch_dict_passed_arguments_returns_arguments(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - ret = np.array([3], dtype=np.float32) - - patch_client_infer_batch = mocker.patch.object(ModelClient, ModelClient.infer_batch.__name__) - patch_client_infer_batch.return_value = ret - with FuturesModelClient(GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - return_value = client.infer_batch(a=a, b=b).result() - assert return_value == ret - patch_client_infer_batch.assert_called_once_with(parameters=None, headers=None, a=a, b=b) - - -@patch_server_model_addsub_no_batch_ready -def test_infer_batch_blocking_behaviour(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - c = np.array([2], dtype=np.float32) - ret = np.array([3], dtype=np.float32) - - # Set up the queue return values to block the queue and then release it - infer_called_with_b_event = Event() - infer_called_with_c_event = Event() - - queue_is_full_event = Event() - - def mock_submit_side_effect(*args, **kwargs): - LOGGER.debug("mock_submit_side_effect called") - assert "b" in kwargs - if kwargs["b"] is b: - infer_called_with_b_event.set() - elif kwargs["b"] is c: - 
infer_called_with_c_event.set() - if not queue_is_full_event.is_set(): - LOGGER.debug("mock_submit_side_effect waiting for queue to be full") - queue_is_full_event.wait() # Block until the event is set - LOGGER.debug("mock_submit_side_effect returning") - return ret - - patch_client_infer_batch = mocker.patch.object(ModelClient, ModelClient.infer_batch.__name__) - patch_client_infer_batch.side_effect = mock_submit_side_effect - - # Set up the client with a max_queue_size of 1 to easily simulate full condition - with FuturesModelClient( - GRPC_LOCALHOST_URL, - ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - max_workers=1, - max_queue_size=1, - non_blocking=False, - ) as client: - client.model_config().result() # Wait for the model to be ready - LOGGER.debug("Client created") - first_future = client.infer_batch(a=a, b=b) - LOGGER.debug("First future created") - infer_called_with_b_event.wait() # Wait for the first call to be made - LOGGER.debug("First call made") - - blocked_thread_start_event = Event() - blocked_thread_result = {} - - def blocked_thread(): - LOGGER.debug("Blocked thread started") - blocked_thread_start_event.set() - LOGGER.debug("Blocked thread waiting for queue to be full") - result = client.infer_batch(a=a, b=c).result() - LOGGER.debug("Blocked thread got result") - blocked_thread_result["ret"] = result - - infer_thread = Thread(target=blocked_thread) - infer_thread.start() - LOGGER.debug("Waiting for blocked thread to start") - blocked_thread_start_event.wait() # Wait for the thread to start - LOGGER.debug("Blocked thread started") - time.sleep(0.1) # Wait a bit to make sure the thread is blocked - assert not infer_called_with_c_event.is_set(), "infer_batch should not have been called with c yet." - - # The blocking call should be waiting by now, so let's release the block - LOGGER.debug("Releasing queue") - queue_is_full_event.set() - - # Wait for the blocked thread to finish - LOGGER.debug("Waiting for blocked thread to finish") - infer_thread.join() - assert blocked_thread_result["ret"] is ret - - # Wait for the first future to finish - assert first_future.result() is ret - - assert ( - patch_client_infer_batch.call_count == 2 - ), "infer_batch should have been called twice (one blocked, one released)." 
- - -@patch_server_model_addsub_no_batch_ready -def test_infer_batch_non_blocking_behaviour(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - c = np.array([2], dtype=np.float32) - ret = np.array([3], dtype=np.float32) - - # Set up the queue return values to block the queue and then release it - infer_called_with_b_event = Event() - - queue_is_full_event = Event() - - def mock_submit_side_effect(*args, **kwargs): - LOGGER.debug("mock_submit_side_effect called") - infer_called_with_b_event.set() - if not queue_is_full_event.is_set(): - LOGGER.debug("mock_submit_side_effect waiting for queue to be full") - queue_is_full_event.wait() # Block until the event is set - LOGGER.debug("mock_submit_side_effect returning") - return ret - - patch_client_infer_batch = mocker.patch.object(ModelClient, ModelClient.infer_batch.__name__) - patch_client_infer_batch.side_effect = mock_submit_side_effect - - # Set up the client with a max_queue_size of 1 to easily simulate full condition - with FuturesModelClient( - GRPC_LOCALHOST_URL, - ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - max_workers=1, - max_queue_size=1, - non_blocking=True, - ) as client: - LOGGER.debug("Client created") - while True: - try: - client.model_config().result() # Wait for the model to be ready - break - except PyTritonClientQueueFullError: - LOGGER.debug("Waiting for model to be ready") - time.sleep(0.1) - pass - first_future = client.infer_batch(a=a, b=b) - LOGGER.debug("First future created") - infer_called_with_b_event.wait() # Wait for the first call to be made - LOGGER.debug("First call made") - second_future = client.infer_batch(a=a, b=c) - LOGGER.debug("Second future created") - with pytest.raises(PyTritonClientQueueFullError): - LOGGER.debug("Calling infer_batch with queue full") - client.infer_batch(a=a, b=c) - - # The blocking call should be waiting by now, so let's release the block - LOGGER.debug("Releasing queue") - queue_is_full_event.set() - - # Wait for the first future to finish - assert first_future.result() is ret - assert second_future.result() is ret - - assert patch_client_infer_batch.call_count == 2, "infer_batch should have been called once." 
- - -@patch_server_model_addsub_no_batch_ready -def test_infer_batch_queue_timeout(mocker): - a = np.array([1], dtype=np.float32) - b = np.array([2], dtype=np.float32) - c = np.array([2], dtype=np.float32) - ret = np.array([3], dtype=np.float32) - - # Set up the queue return values to block the queue and then release it - infer_called_with_b_event = Event() - - queue_is_full_event = Event() - - def mock_submit_side_effect(*args, **kwargs): - LOGGER.debug("mock_submit_side_effect called") - infer_called_with_b_event.set() - if not queue_is_full_event.is_set(): - LOGGER.debug("mock_submit_side_effect waiting for queue to be full") - queue_is_full_event.wait() # Block until the event is set - LOGGER.debug("mock_submit_side_effect returning") - return ret - - patch_client_infer_batch = mocker.patch.object(ModelClient, ModelClient.infer_batch.__name__) - patch_client_infer_batch.side_effect = mock_submit_side_effect - - # Set up the client with a max_queue_size of 1 to easily simulate full condition - with FuturesModelClient( - GRPC_LOCALHOST_URL, - ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - max_workers=1, - max_queue_size=1, - inference_timeout_s=0.1, - ) as client: - LOGGER.debug("Client created") - client.model_config().result() # Wait for the model to be ready - first_future = client.infer_batch(a=a, b=b) - LOGGER.debug("First future created") - infer_called_with_b_event.wait() # Wait for the first call to be made - LOGGER.debug("First call made") - second_future = client.infer_batch(a=a, b=c) - LOGGER.debug("Second future created") - with pytest.raises(PyTritonClientQueueFullError): - LOGGER.debug("Calling infer_batch with queue full") - client.infer_batch(a=a, b=c) - - # The blocking call should be waiting by now, so let's release the block - LOGGER.debug("Releasing queue") - queue_is_full_event.set() - - # Wait for the first future to finish - assert first_future.result() is ret - assert second_future.result() is ret - - assert patch_client_infer_batch.call_count == 2, "infer_batch should have been called once." - - -def test_init_raises_error_when_invalid_max_workers_provided(mocker): - with pytest.raises(ValueError): - with FuturesModelClient(GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, max_workers=-1): - pass - - -def test_init_raises_error_when_invalid_max_queue_size_provided(mocker): - with pytest.raises(ValueError): - with FuturesModelClient(GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, max_queue_size=-1): - pass - - -@pytest.mark.timeout(1.0) -def test_init_http_passes_timeout(mocker): - with FuturesModelClient("http://localhost:6669", "dummy", init_timeout_s=0.2, inference_timeout_s=0.1) as client: - with pytest.raises(PyTritonClientTimeoutError): - client.wait_for_model(timeout_s=0.2).result() - - -@pytest.mark.timeout(5) -def test_init_grpc_passes_timeout_5(mocker): - with FuturesModelClient("grpc://localhost:6669", "dummy", init_timeout_s=0.2, inference_timeout_s=0.1) as client: - with pytest.raises(PyTritonClientTimeoutError): - client.wait_for_model(timeout_s=0.2).result() - - -@pytest.mark.timeout(5) -def test_init_http_spaws_several_threads(mocker): - spy_thread_start = mocker.spy(Thread, Thread.start.__name__) - - with FuturesModelClient("http://localhost:6669", "dummy", init_timeout_s=1, inference_timeout_s=0.2) as client: - timeout_s = 0.2 - # The list function is used to force the evaluation of the list comprehension before iterating over the futures and - # calling their result method. 
This is done to ensure that all the calls occur before the iteration starts, - # and to verify that five threads are created. - futures = list([client.wait_for_model(timeout_s=timeout_s) for _ in range(5)]) # noqa: C411 - for future in futures: - with pytest.raises(PyTritonClientTimeoutError): - future.result() - # Reusing client configuration from existing clients forces wait in other threads to finish first configuration - # request. It sometimes prevents creation of a fifth thread because one of the existing threads can handle another request - # before the new thread is created. This results in a race condition that affects the number of created threads. - assert spy_thread_start.call_count > 1 - - -def test_http_client_raises_error_when_used_after_close(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - with ModelClient(HTTP_LOCALHOST_URL, "dummy") as client: - pass - - with pytest.raises(PyTritonClientClosedError): - client.wait_for_model(timeout_s=0.2) - - a = np.array([1], dtype=np.float32) - with pytest.raises(PyTritonClientClosedError): - client.infer_sample(a=a) - - with pytest.raises(PyTritonClientClosedError): - client.infer_batch(a=[a]) - - -def test_grpc_client_raises_error_when_used_after_close(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - with FuturesModelClient(GRPC_LOCALHOST_URL, "dummy") as client: - pass - - with pytest.raises(PyTritonClientClosedError): - client.wait_for_model(timeout_s=0.2).result() - - a = np.array([1], dtype=np.float32) - with pytest.raises(PyTritonClientClosedError): - client.infer_sample(a=a).result() - - with pytest.raises(PyTritonClientClosedError): - client.infer_batch(a=[a]).result() diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_init.py b/stf/stf-api-alternative/pytriton/tests/unit/test_init.py deleted file mode 100644 index fc4612ffa73d9d6ec4f0d9815e26e7746d660324..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_init.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# noqa: D104 -from importlib.metadata import version - -import pytriton - - -def test_version(): - assert pytriton.__version__ == version("nvidia-pytriton") diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_model.py b/stf/stf-api-alternative/pytriton/tests/unit/test_model.py deleted file mode 100644 index a8860a13c30674bfa86733d413ae5a574a28953e..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_model.py +++ /dev/null @@ -1,521 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import pathlib -import tempfile - -import numpy as np - -from pytriton.decorators import TritonContext, batch -from pytriton.model_config.tensor import Tensor -from pytriton.model_config.triton_model_config import TensorSpec -from pytriton.models.manager import ModelManager -from pytriton.models.model import Model, ModelConfig -from pytriton.proxy.communication import TensorStore -from pytriton.proxy.types import Request -from pytriton.utils.workspace import Workspace - - -def test_get_model_config_return_model_config_when_minimal_required_data(tmp_path): - def infer_func(inputs): - return inputs - - triton_context = TritonContext() - workspace = Workspace(tmp_path / "workspace") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(dtype=np.float32, shape=(-1,)), - Tensor(dtype=np.float32, shape=(-1,)), - ], - outputs=[ - Tensor(dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - model_config = model._get_triton_model_config() - - assert model_config.model_name == "simple" - assert model_config.model_version == 2 - - assert model_config.batching is True - assert model_config.max_batch_size == 128 - - assert model_config.inputs == [ - TensorSpec(name="INPUT_1", dtype=np.float32, shape=(-1,)), - TensorSpec(name="INPUT_2", dtype=np.float32, shape=(-1,)), - ] - - assert model_config.outputs == [ - TensorSpec(name="OUTPUT_1", dtype=np.int32, shape=(-1,)), - ] - - ipc_socket_path = workspace.path / "ipc_proxy_backend_simple" - assert model_config.backend_parameters == { - "shared-memory-socket": f"ipc://{ipc_socket_path.as_posix()}", - } - - -def test_get_model_config_return_model_config_when_custom_names(): - def infer_func(inputs): - return inputs - - triton_context = TritonContext() - with tempfile.TemporaryDirectory() as tempdir: - tempdir = pathlib.Path(tempdir) - workspace = Workspace(tempdir / "workspace") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32().dtype, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32().dtype, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - model_config = model._get_triton_model_config() - - assert model_config.model_name == "simple" - assert model_config.model_version == 2 - - assert model_config.batching is True - assert model_config.max_batch_size == 128 - - assert model_config.inputs == [ - TensorSpec(name="variable1", dtype=object, shape=(2, 1)), - TensorSpec(name="variable2", dtype=np.float32, shape=(2, 1)), - ] - - assert model_config.outputs == [ - TensorSpec(name="factorials", dtype=np.int32, shape=(-1,)), - ] - - -def test_generate_model_create_model_store(): - def infer_func(inputs): - return inputs - - triton_context = TritonContext() - with tempfile.TemporaryDirectory() as tempdir: - tempdir = 
pathlib.Path(tempdir) - workspace = Workspace(tempdir / "workspace") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - with tempfile.TemporaryDirectory() as tempdir: - model_repository = pathlib.Path(tempdir) / "model_repository" - model_repository.mkdir() - - model.generate_model(model_repository) - - assert (model_repository / "simple").is_dir() - assert (model_repository / "simple" / "config.pbtxt").is_file() - - assert (model_repository / "simple" / "2").is_dir() - assert (model_repository / "simple" / "2" / "model.py").is_file() - - -def test_generate_models_with_same_names_and_different_versions_create_model_store(): - def infer_func(inputs): - return inputs - - triton_context = TritonContext() - with tempfile.TemporaryDirectory() as tempdir: - tempdir = pathlib.Path(tempdir) - workspace = Workspace(tempdir / "workspace") - model1 = Model( - model_name="simple", - model_version=1, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - model2 = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - with tempfile.TemporaryDirectory() as tempdir: - model_repository = pathlib.Path(tempdir) / "model_repository" - model_repository.mkdir() - - model1.generate_model(model_repository) - model2.generate_model(model_repository) - - assert (model_repository / "simple").is_dir() - assert (model_repository / "simple" / "config.pbtxt").is_file() - - assert (model_repository / "simple" / "1").is_dir() - assert (model_repository / "simple" / "1" / "model.py").is_file() - - assert (model_repository / "simple" / "2").is_dir() - assert (model_repository / "simple" / "2" / "model.py").is_file() - - -def test_setup_create_proxy_backend_connection(tmp_path): - def infer_func(inputs): - return inputs - - triton_context = TritonContext() - workspace = Workspace(tmp_path / "workspace") - tensor_store = TensorStore(workspace.path / "data_store.sock") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - try: - tensor_store.start() - model.setup() - assert len(model._inference_handlers) == 1 - finally: - model.clean() - tensor_store.close() - - -def test_setup_can_be_called_multiple_times(tmp_path): - 
def infer_func(inputs): - return inputs - - triton_context = TritonContext() - workspace = Workspace(tmp_path / "workspace") - tensor_store = TensorStore(workspace.path / "data_store.sock") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - try: - tensor_store.start() - model.setup() - assert len(model._inference_handlers) == 1 - python_backend1 = model._inference_handlers[0] - - assert python_backend1 is not None - - model.setup() - assert len(model._inference_handlers) == 1 - python_backend2 = model._inference_handlers[0] - - assert python_backend2 is not None - assert python_backend1 == python_backend2 - - finally: - model.clean() - tensor_store.close() - - -def test_clean_remove_proxy_backend_connection(tmp_path): - def infer_func(inputs): - return inputs - - triton_context = TritonContext() - workspace = Workspace(tmp_path / "workspace") - tensor_store = TensorStore(workspace.path / "data_store.sock") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - try: - tensor_store.start() - model.setup() - finally: - model.clean() - tensor_store.close() - assert len(model._inference_handlers) == 0 - - -def test_clean_can_be_called_multiple_times(tmp_path): - def infer_func(inputs): - return inputs - - triton_context = TritonContext() - workspace = Workspace(tmp_path / "workspace") - tensor_store = TensorStore(workspace.path / "data_store.sock") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - try: - tensor_store.start() - model.setup() - model.clean() - model.clean() - assert len(model._inference_handlers) == 0 - finally: - tensor_store.close() - - -def test_is_alive_return_false_when_model_not_setup(tmp_path): - def infer_func(inputs): - return inputs - - triton_context = TritonContext() - with tempfile.TemporaryDirectory() as tempdir: - tempdir = pathlib.Path(tempdir) - workspace = Workspace(tempdir / "workspace") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - assert not model.is_alive() - - -def test_is_alive_return_true_when_model_is_setup(tmp_path): - def infer_func(inputs): - return inputs - - 
triton_context = TritonContext() - workspace = Workspace(tmp_path / "workspace") - tensor_store = TensorStore(workspace.path / "data_store.sock") - model = Model( - model_name="simple", - model_version=2, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable1", dtype=object, shape=(2, 1)), - Tensor(name="variable2", dtype=np.float32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="factorials", dtype=np.int32, shape=(-1,)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - try: - tensor_store.start() - model.setup() - assert model.is_alive() - assert len(model._inference_handlers) == 1 - finally: - model.clean() - tensor_store.close() - - -def test_triton_context_injection(tmp_path): - class Multimodel: - @batch - def infer1(self, variable1): - return [variable1] - - @batch - def infer2(self, variable2): - return [variable2] - - m = Multimodel() - - @batch - def infer_func(variable3): - return [variable3] - - triton_context = TritonContext() - workspace = Workspace(tmp_path / "workspace") - tensor_store = TensorStore(workspace.path / "data_store.sock") - tensor_store.start() - model1 = Model( - model_name="simple1", - model_version=1, - inference_fn=m.infer1, - inputs=[ - Tensor(name="variable1", dtype=np.int32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="out1", dtype=np.int32, shape=(2, 1)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - model2 = Model( - model_name="simple2", - model_version=1, - inference_fn=m.infer2, - inputs=[ - Tensor(name="variable2", dtype=np.int32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="out2", dtype=np.int32, shape=(2, 1)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - model3 = Model( - model_name="simple3", - model_version=1, - inference_fn=infer_func, - inputs=[ - Tensor(name="variable3", dtype=np.int32, shape=(2, 1)), - ], - outputs=[ - Tensor(name="out3", dtype=np.int32, shape=(2, 1)), - ], - config=ModelConfig(max_batch_size=128, batching=True), - workspace=workspace, - triton_context=triton_context, - strict=False, - ) - - manager = ModelManager("") - try: - manager.add_model(model1) - model1.setup() - manager.add_model(model2) - model2.setup() - manager.add_model(model3) - model3.setup() - - input_requests1 = [Request({"variable1": np.array([[7, 5], [8, 6]])}, {})] - input_requests2 = [Request({"variable2": np.array([[1, 2], [1, 2], [11, 12]])}, {})] - input_requests3 = [Request({"variable3": np.array([[1, 2]])}, {})] - - def assert_inputs_properly_mapped_to_outputs(expected_out_name, outputs, input_request_arr): - assert len(outputs) == 1 - assert expected_out_name in outputs[0] - assert outputs[0][expected_out_name].shape == input_request_arr.shape - assert np.array_equal(outputs[0][expected_out_name], input_request_arr) - - outputs1 = m.infer1(input_requests1) - assert_inputs_properly_mapped_to_outputs("out1", outputs1, input_requests1[0]["variable1"]) - - outputs2 = m.infer2(input_requests2) - assert_inputs_properly_mapped_to_outputs("out2", outputs2, input_requests2[0]["variable2"]) - - outputs3 = infer_func(input_requests3) - assert_inputs_properly_mapped_to_outputs("out3", outputs3, input_requests3[0]["variable3"]) - - outputs1 = m.infer1(input_requests1) - assert_inputs_properly_mapped_to_outputs("out1", outputs1, input_requests1[0]["variable1"]) - - outputs3 
= infer_func(input_requests3) - assert_inputs_properly_mapped_to_outputs("out3", outputs3, input_requests3[0]["variable3"]) - finally: - manager.clean() - tensor_store.close() diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_model_config_generator.py b/stf/stf-api-alternative/pytriton/tests/unit/test_model_config_generator.py deleted file mode 100644 index 111e321e2a6573971d963ef1637ba817c9e6bbd7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_model_config_generator.py +++ /dev/null @@ -1,590 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import pathlib -import tempfile - -import numpy as np -import pytest - -from pytriton.exceptions import PyTritonBadParameterError -from pytriton.model_config.common import DeviceKind, DynamicBatcher, QueuePolicy, TimeoutAction -from pytriton.model_config.generator import ModelConfigGenerator -from pytriton.model_config.triton_model_config import ResponseCache, TensorSpec, TritonModelConfig - -from .common import full_model_config - - -def _load_config(config_path: pathlib.Path): - """Load model config from path. - - Args: - config_path: path to file with model config - - Returns: - Dictionary with configuration - """ - from google.protobuf import json_format, text_format # pytype: disable=pyi-error - from tritonclient.grpc import model_config_pb2 # pytype: disable=import-error - - with config_path.open("r") as config_file: - payload = config_file.read() - model_config_proto = text_format.Parse(payload, model_config_pb2.ModelConfig()) - - model_config_dict = json_format.MessageToDict(model_config_proto, preserving_proto_field_name=True) - return model_config_dict - - -def test_set_batching_raise_error_when_mbs_is_0_and_batching_is_not_disabled(): - model_config = TritonModelConfig(model_name="simple", batching=True, max_batch_size=0) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - with pytest.raises(PyTritonBadParameterError, match="The `max_batch_size` must be greater or equal to 1."): - generator._set_batching(model_config_data) - - -def test_set_batching_raise_error_when_mbs_is_less_then_0_and_batching_is_not_disabled(): - model_config = TritonModelConfig(model_name="simple", batching=True, max_batch_size=-1) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - with pytest.raises(PyTritonBadParameterError, match="The `max_batch_size` must be greater or equal to 1."): - generator._set_batching(model_config_data) - - -def test_set_batching_set_max_batch_size_to_0_when_batching_is_disabled(): - model_config = TritonModelConfig(model_name="simple", batching=False) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_batching(model_config_data) - - assert model_config_data == { - "max_batch_size": 0, - } - - -def test_set_batching_set_max_batch_size_to_default_when_batching_set_to_default(): - model_config = 
TritonModelConfig(model_name="simple", batching=True) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_batching(model_config_data) - - assert model_config_data == {"max_batch_size": 4} - - -def test_set_batching_set_dynamic_batching_field_when_batcher_set_to_dynamic(): - model_config = TritonModelConfig(model_name="simple", batching=True, batcher=DynamicBatcher()) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_batching(model_config_data) - - assert model_config_data == {"max_batch_size": 4, "dynamic_batching": {}} - - -def test_set_batching_set_max_batch_size_when_batching_enabled_and_value_passed(): - model_config = TritonModelConfig(model_name="simple", batching=True, max_batch_size=16) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_batching(model_config_data) - - assert model_config_data == {"max_batch_size": 16} - - -def test_set_batching_set_dynamic_batching_config_when_dynamic_batching_enabled_and_flags_passed(): - model_config = TritonModelConfig( - model_name="simple", - batching=True, - max_batch_size=16, - batcher=DynamicBatcher( - preferred_batch_size=[16, 32], - max_queue_delay_microseconds=100, - preserve_ordering=True, - ), - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_batching(model_config_data) - - assert model_config_data["max_batch_size"] == 16 - assert model_config_data["dynamic_batching"] == { - "preferredBatchSize": [16, 32], - "maxQueueDelayMicroseconds": 100, - "preserveOrdering": True, - } - - model_config_data = {} - model_config.batcher.preserve_ordering = False - - generator._set_batching(model_config_data) - - assert model_config_data == { - "max_batch_size": 16, - "dynamic_batching": { - "preferredBatchSize": [16, 32], - "maxQueueDelayMicroseconds": 100, - }, - } - - -def test_set_batching_raise_exception_when_invalid_default_priority_level_passed(): - model_config = TritonModelConfig( - model_name="simple", - batching=True, - max_batch_size=16, - batcher=DynamicBatcher( - priority_levels=5, - default_priority_level=6, - ), - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - - with pytest.raises(PyTritonBadParameterError, match="The `default_priority_level` must be between 1 and 5."): - generator._set_batching(model_config_data) - - -def test_set_batching_set_dynamic_batching_config_when_default_queue_policy_passed(): - model_config = TritonModelConfig( - model_name="simple", - batching=True, - max_batch_size=16, - batcher=DynamicBatcher( - default_queue_policy=QueuePolicy( - allow_timeout_override=True, - timeout_action=TimeoutAction.DELAY, - default_timeout_microseconds=100, - max_queue_size=2, - ) - ), - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_batching(model_config_data) - - assert model_config_data == { - "max_batch_size": 16, - "dynamic_batching": { - "defaultQueuePolicy": { - "allowTimeoutOverride": 1, - "timeoutAction": "DELAY", - "defaultTimeoutMicroseconds": 100, - "maxQueueSize": 2, - } - }, - } - - -def test_set_batching_raise_exception_when_priority_queue_policy_passed_but_no_default_priority_level(): - model_config = TritonModelConfig( - model_name="simple", - batching=True, - max_batch_size=16, - batcher=DynamicBatcher( - priority_queue_policy={ - 1: QueuePolicy( - allow_timeout_override=True, - timeout_action=TimeoutAction.DELAY, - default_timeout_microseconds=100, - 
max_queue_size=2, - ) - }, - ), - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - - with pytest.raises( - PyTritonBadParameterError, - match="Provide the `priority_levels` if you want to define `priority_queue_policy` for Dynamic Batching.", - ): - generator._set_batching(model_config_data) - - -def test_set_batching_raise_exception_when_invalid_priority_queue_policy_passed(): - model_config = TritonModelConfig( - model_name="simple", - batching=True, - max_batch_size=16, - batcher=DynamicBatcher( - priority_levels=5, - default_priority_level=2, - priority_queue_policy={ - 6: QueuePolicy( - allow_timeout_override=True, - timeout_action=TimeoutAction.DELAY, - default_timeout_microseconds=100, - max_queue_size=2, - ) - }, - ), - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - - with pytest.raises( - PyTritonBadParameterError, match="Invalid `priority`=6 provided. The value must be between 1 and 5." - ): - generator._set_batching(model_config_data) - - -def test_set_batching_set_dynamic_batching_config_when_priority_queue_policy_passed(): - model_config = TritonModelConfig( - model_name="simple", - batching=True, - max_batch_size=16, - batcher=DynamicBatcher( - priority_levels=3, - default_priority_level=1, - priority_queue_policy={ - 2: QueuePolicy( - allow_timeout_override=True, - timeout_action=TimeoutAction.DELAY, - default_timeout_microseconds=100, - max_queue_size=2, - ) - }, - ), - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_batching(model_config_data) - - assert model_config_data == { - "max_batch_size": 16, - "dynamic_batching": { - "priorityLevels": 3, - "defaultPriorityLevel": 1, - "priorityQueuePolicy": { - 2: { - "allowTimeoutOverride": 1, - "timeoutAction": "DELAY", - "defaultTimeoutMicroseconds": 100, - "maxQueueSize": 2, - } - }, - }, - } - - -def test_set_instance_group_not_update_data_when_instance_group_not_provided(): - model_config = TritonModelConfig(model_name="simple") - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_instance_group(model_config_data) - - assert model_config_data == {} - - -def test_set_instance_group_set_instance_configuration_when_single_config_provided(): - model_config = TritonModelConfig(model_name="simple", instance_group={DeviceKind.KIND_GPU: None}) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_instance_group(model_config_data) - - assert model_config_data == { - "instance_group": [ - {"kind": DeviceKind.KIND_GPU.value, "count": None}, - ] - } - - -def test_set_instance_group_set_instance_configuration_when_single_multiple_configs_provided(): - model_config = TritonModelConfig( - model_name="simple", - instance_group={ - DeviceKind.KIND_GPU: None, - DeviceKind.KIND_CPU: 10, - }, - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_instance_group(model_config_data) - - assert model_config_data == { - "instance_group": [ - {"kind": DeviceKind.KIND_GPU.value, "count": None}, - {"kind": DeviceKind.KIND_CPU.value, "count": 10}, - ] - } - - -def test_transaction_policy_not_update_data_when_decoupled_execution_disabled(): - model_config = TritonModelConfig(model_name="simple", decoupled=False) - generator = ModelConfigGenerator(model_config) - model_config_data = {} - generator._set_model_transaction_policy(model_config_data) - assert model_config_data == {} - - -def 
test_transaction_policy_added_when_decoupled_execution_enabled(): - model_config = TritonModelConfig(model_name="simple", decoupled=True) - generator = ModelConfigGenerator(model_config) - model_config_data = {} - generator._set_model_transaction_policy(model_config_data) - assert model_config_data == {"model_transaction_policy": {"decoupled": True}} - - -def test_set_backend_parameters_not_update_data_when_parameters_not_provided(): - model_config = TritonModelConfig(model_name="simple") - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_backend_parameters(model_config_data) - - assert model_config_data == {} - - -def test_set_backend_parameters_update_config_when_parameters_provided(): - model_config = TritonModelConfig( - model_name="simple", - backend_parameters={ - "parameter1": "value1", - "parameter2": "value2", - }, - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_backend_parameters(model_config_data) - - assert model_config_data == { - "parameters": { - "parameter1": {"string_value": "value1"}, - "parameter2": {"string_value": "value2"}, - } - } - - -def test_set_model_signature_not_update_data_when_spec_not_provided(): - model_config = TritonModelConfig(model_name="simple") - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_model_signature(model_config_data) - - assert model_config_data == {} - - -def test_set_model_signature_update_data_when_spec_provided(): - model_config = TritonModelConfig( - model_name="simple", - inputs=[ - TensorSpec(name="INPUT_1", dtype=np.float32, shape=(-1,)), - TensorSpec(name="INPUT_2", dtype=object, shape=(-1,)), - ], - outputs=[ - TensorSpec(name="OUTPUT_1", dtype=np.int32, shape=(1000,)), - ], - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - generator._set_model_signature(model_config_data) - - assert model_config_data == { - "input": [ - {"name": "INPUT_1", "data_type": "TYPE_FP32", "dims": [-1]}, - {"name": "INPUT_2", "data_type": "TYPE_STRING", "dims": [-1]}, - ], - "output": [ - {"name": "OUTPUT_1", "data_type": "TYPE_INT32", "dims": [1000]}, - ], - } - - -def test_set_model_signature_raise_error_when_output_marked_as_optional(): - model_config = TritonModelConfig( - model_name="simple", - inputs=[ - TensorSpec(name="INPUT_1", dtype=np.float32, shape=(-1,)), - TensorSpec(name="INPUT_2", dtype=object, shape=(-1,)), - ], - outputs=[ - TensorSpec(name="OUTPUT_1", dtype=np.int32, shape=(1000,), optional=True), - ], - ) - generator = ModelConfigGenerator(model_config) - - model_config_data = {} - with pytest.raises( - PyTritonBadParameterError, - match="Optional flag for outputs is not supported. 
Outputs marked as optional: OUTPUT_1.", - ): - generator._set_model_signature(model_config_data) - - -def test_get_config_call_config_generator_methods(mocker): - mock_set_batching = mocker.patch.object(ModelConfigGenerator, "_set_batching") - mock_set_model_signature = mocker.patch.object(ModelConfigGenerator, "_set_model_signature") - mock_set_instance_group = mocker.patch.object(ModelConfigGenerator, "_set_instance_group") - mock_set_backend_parameters = mocker.patch.object(ModelConfigGenerator, "_set_backend_parameters") - - model_config = TritonModelConfig(model_name="simple") - - generator = ModelConfigGenerator(model_config) - model_config_data = generator.get_config() - - assert model_config_data == { - "name": "simple", - "backend": "python", - } - - assert mock_set_batching.called is True - assert mock_set_model_signature.called is True - assert mock_set_instance_group.called is True - assert mock_set_backend_parameters.called is True - - -def test_get_config_return_defaults_when_minimal_config_passed(): - model_config = TritonModelConfig(model_name="simple") - - generator = ModelConfigGenerator(model_config) - model_config_data = generator.get_config() - - assert model_config_data == { - "name": "simple", - "backend": "python", - "max_batch_size": 4, - } - - -def test_get_config_return_response_cache_when_enabled_for_model(): - model_config = TritonModelConfig(model_name="simple", response_cache=ResponseCache(enable=True)) - - generator = ModelConfigGenerator(model_config) - model_config_data = generator.get_config() - - assert model_config_data == { - "name": "simple", - "backend": "python", - "max_batch_size": 4, - "response_cache": {"enable": True}, - } - - -def test_get_config_return_response_cache_when_disabled_for_model(): - model_config = TritonModelConfig(model_name="simple", response_cache=ResponseCache(enable=False)) - - generator = ModelConfigGenerator(model_config) - model_config_data = generator.get_config() - - assert model_config_data == { - "name": "simple", - "backend": "python", - "max_batch_size": 4, - "response_cache": {"enable": False}, - } - - -def test_to_file_save_config_to_file_and_override_max_batch_size_when_batching_disabled(mocker): - mock_get_config = mocker.patch.object(ModelConfigGenerator, "get_config") - mock_get_config.return_value = { - "name": "simple", - "backend": "python", - "max_batch_size": 0, - "dynamic_batching": {}, - } - - model_config = TritonModelConfig(model_name="simple") - generator = ModelConfigGenerator(model_config) - - with tempfile.NamedTemporaryFile() as fp: - generator.to_file(fp.name) - - config_path = pathlib.Path(fp.name) - assert config_path.exists() is True - - data = _load_config(config_path) - - assert data == { - "name": "simple", - "backend": "python", - "dynamic_batching": {}, - } - - -@pytest.mark.xfail(reason="New client generation is not yet implemented") -def test_to_file_save_config_to_file_when_full_config_specified(): - generator = ModelConfigGenerator(full_model_config) - - with tempfile.NamedTemporaryFile() as fp: - generator.to_file(fp.name) - - config_path = pathlib.Path(fp.name) - assert config_path.exists() is True - - data = _load_config(config_path) - - assert data == { - "name": "simple", - "backend": "python", - "max_batch_size": 16, - "dynamic_batching": { - "preferred_batch_size": [16, 32], - "max_queue_delay_microseconds": "100", - "preserve_ordering": True, - "priority_levels": "3", - "default_priority_level": "1", - "default_queue_policy": { - "allow_timeout_override": True, - 
"timeout_action": "DELAY", - "default_timeout_microseconds": "100", - "max_queue_size": 2, - }, - "priority_queue_policy": { - "2": { - "allow_timeout_override": True, - "timeout_action": "DELAY", - "default_timeout_microseconds": "100", - "max_queue_size": 3, - } - }, - }, - "instance_group": [ - { - "count": 1, - "kind": "KIND_CPU", - }, - { - "count": 2, - "kind": "KIND_GPU", - }, - ], - "input": [ - {"name": "INPUT_1", "data_type": "TYPE_FP32", "dims": ["-1"]}, - {"name": "INPUT_2", "data_type": "TYPE_STRING", "dims": ["-1"]}, - ], - "output": [ - {"name": "OUTPUT_1", "data_type": "TYPE_INT32", "dims": ["1000"]}, - ], - "parameters": { - "parameter1": {"string_value": "value1"}, - "parameter2": {"string_value": "value2"}, - }, - "response_cache": {"enable": True}, - "model_transaction_policy": {"decoupled": True}, - } diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_model_config_parser.py b/stf/stf-api-alternative/pytriton/tests/unit/test_model_config_parser.py deleted file mode 100644 index 7b49857ce7dce4e09e3e5941d3c68d65114925b8..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_model_config_parser.py +++ /dev/null @@ -1,542 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import pathlib - -import numpy as np -import pytest - -from pytriton.exceptions import PyTritonModelConfigError -from pytriton.model_config.common import DeviceKind, DynamicBatcher, TimeoutAction -from pytriton.model_config.parser import ModelConfigParser -from pytriton.model_config.triton_model_config import ResponseCache, TensorSpec - -from .common import full_model_config - -common_model_config = { - "backend": "python", - "instance_group": [{"kind": "KIND_CPU"}], - "parameters": {"shared-memory-socket": {"string_value": "ipc:///tmp/proxy_backend.ipc"}}, -} - -invalid_model_config = { - **common_model_config, - **{ - "name": "minimal", - "input": [ - {"data_type": "TYPE_FLOAT32", "dims": ["-1"], "name": "INPUT_0"}, - ], - "output": [ - {"data_type": "INT32", "dims": ["-1"], "name": "OUTPUT_0"}, - ], - }, -} - -minimal_model_config = { - **common_model_config, - **{ - "name": "minimal", - "input": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "INPUT_0"}, - ], - "output": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "OUTPUT_0"}, - ], - }, -} - -minimal_response_model_config = { - **common_model_config, - **{ - "name": "minimal", - "input": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "INPUT_0"}, - ], - "output": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "OUTPUT_0"}, - ], - "response_cache": {"enable": True}, - }, -} - -simple_add_model_config = { - **common_model_config, - **{ - "name": "add", - "max_batch_size": 16, - "input": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "INPUT_0"}, - {"data_type": "TYPE_FP32", "dims": ["-1"], "name": "INPUT_1"}, - ], - "output": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "OUTPUT_0"}, - ], - "dynamic_batching": {}, - }, -} - -string_model_config = { - **common_model_config, - **{ - "name": "string", - "max_batch_size": 16, - "input": [ - {"data_type": "TYPE_STRING", "dims": ["-1"], "name": "INPUT_0"}, - ], - "output": [ - {"data_type": "TYPE_STRING", "dims": ["-1"], "name": "OUTPUT_0"}, - ], - }, -} - -add_model_config_with_model_not_supporting_batching = { - **common_model_config, - # no max_batch_size and dynamic_batching keys - **{ - "name": "add", - "input": [ - {"data_type": "TYPE_INT32", "dims": ["1"], "name": "INPUT_0"}, - {"data_type": "TYPE_INT32", "dims": ["1"], "name": "INPUT_1"}, - ], - "output": [ - {"data_type": "TYPE_INT32", "dims": ["1"], "name": "OUTPUT_0"}, - ], - }, -} - -add_model_config_without_dynamic_batching = { - **common_model_config, - "max_batch_size": 16, - **{ - "name": "add", - "input": [ - {"data_type": "TYPE_INT32", "dims": ["1"], "name": "INPUT_0"}, - {"data_type": "TYPE_INT32", "dims": ["1"], "name": "INPUT_1"}, - ], - "output": [ - {"data_type": "TYPE_INT32", "dims": ["1"], "name": "OUTPUT_0"}, - ], - }, -} - -add_model_config_with_simple_dynamic_batching = { - **common_model_config, - **{ - "name": "add", - "max_batch_size": 16, - "input": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "INPUT_0"}, - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "INPUT_1"}, - ], - "output": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "OUTPUT_0"}, - ], - "dynamic_batching": { - "max_queue_delay_microseconds": 100, - "preferred_batch_size": [64, 128], - }, - }, -} - -add_model_config_with_advanced_dynamic_batching = { - **common_model_config, - **{ - "name": "add", - "max_batch_size": 16, - "input": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "INPUT_0"}, - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "INPUT_1"}, - ], - 
"output": [ - {"data_type": "TYPE_INT32", "dims": ["-1"], "name": "OUTPUT_0"}, - ], - "dynamic_batching": { - "max_queue_delay_microseconds": 100, - "preferred_batch_size": [64, 128], - "preserve_ordering": True, - "response_cache": True, - "priority_levels": 2, - "default_priority_level": 1, - "default_queue_policy": { - "timeout_action": "DELAY", - "default_timeout_microseconds": 100, - "allow_timeout_override": True, - "max_queue_size": 10, - }, - "priority_queue_policy": { - 1: { - "timeout_action": "DELAY", - "default_timeout_microseconds": 100, - "allow_timeout_override": True, - "max_queue_size": 10, - }, - 2: { - "timeout_action": "REJECT", - "default_timeout_microseconds": 1000, - "allow_timeout_override": False, - "max_queue_size": 2, - }, - }, - }, - }, -} - - -def test_rewrite_io_spec_raise_error_when_empty_dict(): - with pytest.raises(PyTritonModelConfigError, match="Name for input at index 0 not provided."): - ModelConfigParser.rewrite_io_spec({}, io_type="input", idx=0) - - -def test_rewrite_io_spec_raise_error_when_no_data_type(): - with pytest.raises(PyTritonModelConfigError, match="Data type for input with name `input` not defined."): - ModelConfigParser.rewrite_io_spec({"name": "input"}, io_type="input", idx=0) - - -def test_rewrite_io_spec_raise_error_when_no_invalid_data_type(): - with pytest.raises( - PyTritonModelConfigError, - match="Invalid data type `FLOAT32` for input with name `input` not defined. The expected name is TYPE_{type}.", - ): - ModelConfigParser.rewrite_io_spec( - { - "name": "input", - "data_type": "FLOAT32", - }, - io_type="input", - idx=0, - ) - - -def test_rewrite_io_spec_raise_error_when_unsupported_data_type(): - with pytest.raises( - PyTritonModelConfigError, - match="Invalid data type `FLOAT32` for input with name `input` not defined. 
The expected name is TYPE_{type}.", - ): - ModelConfigParser.rewrite_io_spec( - { - "name": "input", - "data_type": "FLOAT32", - }, - io_type="input", - idx=0, - ) - - -def test_rewrite_io_spec_raise_error_when_no_dimension(): - with pytest.raises(PyTritonModelConfigError, match="Dimension for input with name `input` not defined."): - ModelConfigParser.rewrite_io_spec( - {"name": "input", "data_type": "TYPE_FP32", "dims": None}, - io_type="input", - idx=0, - ) - - -def test_rewrite_io_spec_return_tensor_spec_when_valid_data(): - tensor_spec = ModelConfigParser.rewrite_io_spec( - {"name": "input", "data_type": "TYPE_FP32", "dims": [1]}, - io_type="input", - idx=0, - ) - - assert tensor_spec == TensorSpec(name="input", dtype=np.float32, shape=(1,)) - - -def test_parse_from_dict_raise_error_when_invalid_data_type_in_config(): - with pytest.raises(PyTritonModelConfigError, match="Unsupported data type `FLOAT32` for input with name `INPUT_0`"): - ModelConfigParser.from_dict(model_config_dict=invalid_model_config) - - -def test_parse_from_dict_return_model_config_when_minimal_config_used(): - model_config = ModelConfigParser.from_dict(model_config_dict=minimal_model_config) - assert model_config.model_name == "minimal" - assert model_config.max_batch_size == 0 - assert model_config.batching is False - - assert len(model_config.instance_group) == 1 - - device_kind = list(model_config.instance_group.keys())[0] - device_count = list(model_config.instance_group.values())[0] - assert device_kind.value == DeviceKind.KIND_CPU.value - assert device_count is None - - assert not model_config.decoupled - - assert model_config.backend_parameters == {"shared-memory-socket": "ipc:///tmp/proxy_backend.ipc"} - assert model_config.inputs[0] == TensorSpec(name="INPUT_0", dtype=np.int32, shape=(-1,)) - assert model_config.outputs[0] == TensorSpec(name="OUTPUT_0", dtype=np.int32, shape=(-1,)) - - assert model_config.batcher is None - - -def test_parse_from_dict_return_model_config_when_simple_config_used(): - model_config = ModelConfigParser.from_dict(model_config_dict=simple_add_model_config) - assert model_config.model_name == "add" - assert model_config.max_batch_size == 16 - assert model_config.batching is True - - assert len(model_config.instance_group) == 1 - - device_kind = list(model_config.instance_group.keys())[0] - device_count = list(model_config.instance_group.values())[0] - assert device_kind.value == DeviceKind.KIND_CPU.value - assert device_count is None - - assert not model_config.decoupled - - assert model_config.backend_parameters == {"shared-memory-socket": "ipc:///tmp/proxy_backend.ipc"} - assert model_config.inputs == [ - TensorSpec(name="INPUT_0", dtype=np.int32, shape=(-1,)), - TensorSpec(name="INPUT_1", dtype=np.float32, shape=(-1,)), - ] - assert model_config.outputs == [TensorSpec(name="OUTPUT_0", dtype=np.int32, shape=(-1,))] - - assert model_config.batcher == DynamicBatcher() - - -def test_parse_from_dict_return_model_config_when_string_config_used(): - model_config = ModelConfigParser.from_dict(model_config_dict=string_model_config) - assert model_config.model_name == "string" - assert model_config.max_batch_size == 16 - assert model_config.batching is True - - assert len(model_config.instance_group) == 1 - - device_kind = list(model_config.instance_group.keys())[0] - device_count = list(model_config.instance_group.values())[0] - assert device_kind.value == DeviceKind.KIND_CPU.value - assert device_count is None - - assert not model_config.decoupled - - assert 
model_config.backend_parameters == {"shared-memory-socket": "ipc:///tmp/proxy_backend.ipc"} - assert model_config.inputs == [ - TensorSpec(name="INPUT_0", dtype=np.bytes_, shape=(-1,)), - ] - assert model_config.outputs == [TensorSpec(name="OUTPUT_0", dtype=np.bytes_, shape=(-1,))] - - assert model_config.batcher is None - - -def test_parse_from_dict_return_model_config_when_add_model_without_batching_used(): - model_config = ModelConfigParser.from_dict(model_config_dict=add_model_config_with_model_not_supporting_batching) - assert model_config.model_name == "add" - assert model_config.max_batch_size == 0 - assert model_config.batching is False - - assert len(model_config.instance_group) == 1 - - device_kind = list(model_config.instance_group.keys())[0] - device_count = list(model_config.instance_group.values())[0] - assert device_kind.value == DeviceKind.KIND_CPU.value - assert device_count is None - - assert not model_config.decoupled - - assert model_config.backend_parameters == {"shared-memory-socket": "ipc:///tmp/proxy_backend.ipc"} - assert model_config.inputs == [ - TensorSpec(name="INPUT_0", dtype=np.int32, shape=(1,)), - TensorSpec(name="INPUT_1", dtype=np.int32, shape=(1,)), - ] - assert model_config.outputs == [TensorSpec(name="OUTPUT_0", dtype=np.int32, shape=(1,))] - - assert model_config.batcher is None - - -def test_parse_from_dict_return_model_config_when_decoupled_execution_is_enabled(): - model_config_dict = { - **minimal_model_config, - **{ - "model_transaction_policy": {"decoupled": True}, - }, - } - - model_config = ModelConfigParser.from_dict(model_config_dict=model_config_dict) - assert model_config.decoupled - - -def test_parse_from_dict_return_model_config_when_decoupled_execution_is_explicitly_disabled(): - model_config_dict = { - **minimal_model_config, - **{ - "model_transaction_policy": {"decoupled": False}, - }, - } - - model_config = ModelConfigParser.from_dict(model_config_dict=model_config_dict) - assert not model_config.decoupled - - -def test_parse_from_dict_return_model_config_when_add_model_without_dynamic_batching_used(): - model_config = ModelConfigParser.from_dict(model_config_dict=add_model_config_without_dynamic_batching) - assert model_config.model_name == "add" - assert model_config.max_batch_size == 16 - assert model_config.batching is True - - assert len(model_config.instance_group) == 1 - - device_kind = list(model_config.instance_group.keys())[0] - device_count = list(model_config.instance_group.values())[0] - assert device_kind.value == DeviceKind.KIND_CPU.value - assert device_count is None - - assert not model_config.decoupled - - assert model_config.backend_parameters == {"shared-memory-socket": "ipc:///tmp/proxy_backend.ipc"} - assert model_config.inputs == [ - TensorSpec(name="INPUT_0", dtype=np.int32, shape=(1,)), - TensorSpec(name="INPUT_1", dtype=np.int32, shape=(1,)), - ] - assert model_config.outputs == [TensorSpec(name="OUTPUT_0", dtype=np.int32, shape=(1,))] - - assert model_config.batcher is None - - -def test_parse_from_dict_return_model_config_when_add_model_with_simple_dynamic_batching_used(): - model_config = ModelConfigParser.from_dict(model_config_dict=add_model_config_with_simple_dynamic_batching) - assert model_config.model_name == "add" - assert model_config.max_batch_size == 16 - assert model_config.batching is True - - assert len(model_config.instance_group) == 1 - - device_kind = list(model_config.instance_group.keys())[0] - device_count = list(model_config.instance_group.values())[0] - assert device_kind.value == 
DeviceKind.KIND_CPU.value - assert device_count is None - - assert not model_config.decoupled - - assert model_config.backend_parameters == {"shared-memory-socket": "ipc:///tmp/proxy_backend.ipc"} - assert model_config.inputs == [ - TensorSpec(name="INPUT_0", dtype=np.int32, shape=(-1,)), - TensorSpec(name="INPUT_1", dtype=np.int32, shape=(-1,)), - ] - assert model_config.outputs == [ - TensorSpec(name="OUTPUT_0", dtype=np.int32, shape=(-1,)), - ] - assert model_config.batcher.preferred_batch_size == [64, 128] - assert model_config.batcher.max_queue_delay_microseconds == 100 - assert model_config.batcher.preserve_ordering is False - - assert model_config.batcher.priority_levels == 0 - assert model_config.batcher.default_priority_level == 0 - assert model_config.batcher.default_queue_policy is None - assert model_config.batcher.priority_queue_policy is None - - -def test_parse_from_dict_return_model_config_when_add_model_with_advanced_batcher_used(): - model_config = ModelConfigParser.from_dict(model_config_dict=add_model_config_with_advanced_dynamic_batching) - assert model_config.model_name == "add" - assert model_config.max_batch_size == 16 - assert model_config.batching is True - - assert len(model_config.instance_group) == 1 - - device_kind = list(model_config.instance_group.keys())[0] - device_count = list(model_config.instance_group.values())[0] - assert device_kind.value == DeviceKind.KIND_CPU.value - assert device_count is None - - assert not model_config.decoupled - - assert model_config.backend_parameters == {"shared-memory-socket": "ipc:///tmp/proxy_backend.ipc"} - assert model_config.inputs == [ - TensorSpec(name="INPUT_0", dtype=np.int32, shape=(-1,)), - TensorSpec(name="INPUT_1", dtype=np.int32, shape=(-1,)), - ] - assert model_config.outputs == [ - TensorSpec(name="OUTPUT_0", dtype=np.int32, shape=(-1,)), - ] - assert model_config.batcher.preferred_batch_size == [64, 128] - assert model_config.batcher.max_queue_delay_microseconds == 100 - assert model_config.batcher.preserve_ordering is True - - assert model_config.batcher.priority_levels == 2 - assert model_config.batcher.default_priority_level == 1 - - assert model_config.batcher.default_queue_policy is not None - assert model_config.batcher.default_queue_policy.allow_timeout_override is True - assert model_config.batcher.default_queue_policy.default_timeout_microseconds == 100 - assert model_config.batcher.default_queue_policy.max_queue_size == 10 - assert model_config.batcher.default_queue_policy.timeout_action == TimeoutAction.DELAY - - assert model_config.batcher.priority_queue_policy is not None - assert len(model_config.batcher.priority_queue_policy) == 2 - - priority_queue_policy1 = model_config.batcher.priority_queue_policy[1] - assert priority_queue_policy1.allow_timeout_override is True - assert priority_queue_policy1.default_timeout_microseconds == 100 - assert priority_queue_policy1.max_queue_size == 10 - assert priority_queue_policy1.timeout_action == TimeoutAction.DELAY - - priority_queue_policy2 = model_config.batcher.priority_queue_policy[2] - assert priority_queue_policy2.allow_timeout_override is False - assert priority_queue_policy2.default_timeout_microseconds == 1000 - assert priority_queue_policy2.max_queue_size == 2 - assert priority_queue_policy2.timeout_action == TimeoutAction.REJECT - - -def test_parse_from_dict_return_model_config_when_response_cache_config_used(): - model_config = ModelConfigParser.from_dict(model_config_dict=minimal_response_model_config) - assert model_config.model_name == 
"minimal" - assert model_config.max_batch_size == 0 - assert model_config.batching is False - - assert len(model_config.instance_group) == 1 - - device_kind = list(model_config.instance_group.keys())[0] - device_count = list(model_config.instance_group.values())[0] - assert device_kind.value == DeviceKind.KIND_CPU.value - assert device_count is None - - assert not model_config.decoupled - - assert model_config.backend_parameters == {"shared-memory-socket": "ipc:///tmp/proxy_backend.ipc"} - assert model_config.inputs[0] == TensorSpec(name="INPUT_0", dtype=np.int32, shape=(-1,)) - assert model_config.outputs[0] == TensorSpec(name="OUTPUT_0", dtype=np.int32, shape=(-1,)) - - assert model_config.batcher is None - - assert model_config.response_cache == ResponseCache(enable=True) - - -def test_parse_from_file_raise_error_when_file_with_invalid_model_config_passed(): - config_path = pathlib.Path(__file__).parent.resolve() / "assets" / "invalid_config.pbtxt" - with pytest.raises(PyTritonModelConfigError): - ModelConfigParser.from_file(config_path=config_path) - - -def test_parse_from_file_return_model_config_when_file_with_valid_model_config_passed(): - config_path = pathlib.Path(__file__).parent.resolve() / "assets" / "valid_config.pbtxt" - model_config = ModelConfigParser.from_file(config_path=config_path) - - assert model_config.model_name == "simple" - assert model_config.backend == "python" - - assert model_config.max_batch_size == 8 - - assert model_config.inputs == [ - TensorSpec(name="INPUT0", dtype=np.int32, shape=(16,)), - TensorSpec(name="INPUT1", dtype=np.int32, shape=(16,)), - ] - assert model_config.outputs == [ - TensorSpec(name="OUTPUT0", dtype=np.int32, shape=(16,)), - TensorSpec(name="OUTPUT1", dtype=np.int32, shape=(16,)), - ] - - -def test_parse_from_file_return_model_config_when_file_with_full_supported_config_passed(): - config_path = pathlib.Path(__file__).parent.resolve() / "assets" / "full_config.pbtxt" - model_config = ModelConfigParser.from_file(config_path=config_path) - - assert model_config == full_model_config diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_model_proxy_communication.py b/stf/stf-api-alternative/pytriton/tests/unit/test_model_proxy_communication.py deleted file mode 100644 index 1c7d4ab70e878ba5f7f1ad457383a4092bfbf629..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_model_proxy_communication.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import base64 -import json -import logging -import multiprocessing -import pathlib -import sys -import time -import traceback -from unittest.mock import Mock - -import numpy as np -import pytest -import zmq - -from pytriton.model_config.generator import ModelConfigGenerator -from pytriton.model_config.triton_model_config import TensorSpec, TritonModelConfig -from pytriton.proxy.communication import TensorStore -from pytriton.proxy.validators import TritonResultsValidator -from pytriton.triton import TRITONSERVER_DIST_DIR - -LOGGER = logging.getLogger("tests.test_model_error_handling") -logging.basicConfig( - format="%(asctime)s - %(levelname)s - %(name)s: %(message)s", - level=logging.INFO, -) - - -class Tensor: - def __init__(self, name, data): - self._name = name - self._data = data - - def name(self): - return self._name - - def as_numpy(self): - return self._data - - -class InferenceRequest: - def __init__(self, model_name, inputs, requested_output_names, parameters=None): - self.model_name = model_name - self._inputs = inputs - self.requested_output_names = requested_output_names - self._parameters = parameters or {} - - def inputs(self): - return self._inputs - - def parameters(self): - return json.dumps(self._parameters) - - def get_response_sender(self): - return None - - -def _error_infer_fn(*_, **__): - # Wrapper raises division by zero error - time.sleep(0.2) - return 2 / 0 - - -def _error_infer_gen_fn(*_, **__): - # Wrapper raises division by zero error - time.sleep(0.2) - raise RuntimeError("division by zero") - - -def _get_proxy_backend(mocker, model_config, shared_memory_socket, data_store_socket): - from pytriton.proxy.model import TritonPythonModel - - authkey = multiprocessing.current_process().authkey - authkey = base64.b64encode(authkey).decode("utf-8") - instance_data = { - "shared-memory-socket": shared_memory_socket, - "data-store-socket": data_store_socket, - "auth-key": authkey, - } - - mocker.patch.object(TritonPythonModel, "_get_instance_data", return_value=instance_data) - - model_config_json_payload = json.dumps(ModelConfigGenerator(model_config).get_config()).encode("utf-8") - backend_initialization_args = {"model_config": model_config_json_payload} - - backend_model = None - try: - backend_model = TritonPythonModel() - backend_model.initialize(backend_initialization_args) - return backend_model - except Exception: - if backend_model: - backend_model.finalize() - raise - - -@pytest.mark.parametrize( - "infer_fn,decoupled", - [ - (_error_infer_fn, False), - (_error_infer_gen_fn, True), - ], -) -def test_model_throws_exception(tmp_path, mocker, infer_fn, decoupled): - # add python backend folder to find triton_python_backend_utils from model.py - python_backend_path = TRITONSERVER_DIST_DIR / "backends" / "python" - sys.path.append(str(python_backend_path)) - - print("sys.path updated") # noqa: T201 - for entry in sys.path: - print(f" {entry}") # noqa: T201 - - try: - import triton_python_backend_utils as pb_utils # pytype: disable=import-error - - # add TritonModelException to pb_utils for test (python backend does this in C++ code) - pb_utils.TritonModelException = RuntimeError - pb_utils.Logger = Mock() - - from pytriton.proxy.inference_handler import InferenceHandler - from pytriton.utils.workspace import Workspace - - model_name = "model1" - workspace = Workspace(pathlib.Path(tmp_path) / "w") - ipc_socket_path = workspace.path / f"proxy_{model_name}.ipc" - shared_memory_socket = f"ipc://{ipc_socket_path.as_posix()}" - data_store_socket = (workspace.path / 
"data_store.socket").as_posix() - - model_config = TritonModelConfig( - model_name=model_name, - inputs=[TensorSpec(name="input1", dtype=np.float32, shape=(-1,))], - outputs=[TensorSpec(name="output1", dtype=np.float32, shape=(-1,))], - backend_parameters={"shared-memory-socket": shared_memory_socket}, - decoupled=decoupled, - ) - - zmq_context = zmq.Context() - - authkey = multiprocessing.current_process().authkey - tensor_store = TensorStore(data_store_socket, authkey) - tensor_store.start() - - backend_model = _get_proxy_backend(mocker, model_config, shared_memory_socket, data_store_socket) - - validator = TritonResultsValidator(model_config, strict=False) - inference_handler = InferenceHandler( - infer_fn, - model_config, - shared_memory_socket=shared_memory_socket, - data_store_socket=data_store_socket, - zmq_context=zmq_context, - validator=validator, - ) - inference_handler.start() - - requests = [ - InferenceRequest( - model_name=model_name, - inputs=[Tensor("input1", np.array([[1, 2, 3]], dtype=np.float32))], - requested_output_names=["output1"], - ), - ] - - try: - result = backend_model.execute(requests) - pytest.fail(f"Model raised exception, but exec_batch passed - result: {result}") - except pb_utils.TritonModelException: # pytype: disable=module-attr - LOGGER.info("Inference exception") - msg = traceback.format_exc() - LOGGER.info(msg) - assert "division by zero" in msg - except Exception: - msg = traceback.format_exc() - pytest.fail(f"Wrong exception raised: {msg}") - finally: - zmq_context.term() - inference_handler.stop() - backend_model.finalize() - tensor_store.close() - - finally: - sys.path.pop() - if "pb_utils" in locals() and hasattr(pb_utils, "TritonModelException"): # pytype: disable=name-error - delattr(pb_utils, "TritonModelException") # pytype: disable=name-error - - print("sys.path cleaned-up") # noqa: T201 - for entry in sys.path: - print(f" {entry}") # noqa: T201 diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_models_manager.py b/stf/stf-api-alternative/pytriton/tests/unit/test_models_manager.py deleted file mode 100644 index 4d9c254175c86a4c72c037caed95c36a97d7c290..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_models_manager.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from typing import Iterable -from unittest.mock import Mock - -import pytest - -from pytriton.exceptions import PyTritonInvalidOperationError -from pytriton.models.manager import ModelManager - - -def _match_length(models: Iterable, length: int) -> bool: - items = [] - for m in models: - items.append(m) - - return len(items) == length - - -def test_add_model_store_models_in_registry_when_models_have_different_names(): - model1 = Mock(model_name="Test1", model_version=1) - model2 = Mock(model_name="Test2", model_version=1) - - model_manager = ModelManager(triton_url="") - model_manager.add_model(model1) - model_manager.add_model(model2) - - assert _match_length(model_manager.models, 2) is True - - -def test_add_model_store_models_in_registry_when_models_have_different_versions(): - model1 = Mock(model_name="Test1", model_version=1) - model2 = Mock(model_name="Test1", model_version=2) - - model_manager = ModelManager(triton_url="") - model_manager.add_model(model1) - model_manager.add_model(model2) - - assert _match_length(model_manager.models, 2) is True - - -def test_add_model_raise_error_when_models_have_same_names_and_versions(): - model1 = Mock(model_name="Test", model_version=1) - model2 = Mock(model_name="Test", model_version=1) - - model_manager = ModelManager(triton_url="") - model_manager.add_model(model1) - - with pytest.raises(PyTritonInvalidOperationError, match="Cannot add model with the same name twice."): - model_manager.add_model(model2) - - -def test_create_models_call_model_generate_and_setup_when_models_added(mocker): - model1 = Mock(model_name="Test1", model_version=1) - model2 = Mock(model_name="Test2", model_version=1) - mocker.patch.object(model1, "is_alive").return_value = False - mocker.patch.object(model2, "is_alive").return_value = False - - model_manager = ModelManager(triton_url="") - load_model_method = mocker.patch.object(model_manager, "_load_model") - - model_manager.add_model(model1) - model_manager.add_model(model2) - model_manager.load_models() - assert load_model_method.call_count == 2 - - -def test_clean_call_clean_on_each_model_and_remove_models_from_registry_when_models_added(): - model1 = Mock(model_name="Test1", model_version=1) - model2 = Mock(model_name="Test2", model_version=1) - - model_manager = ModelManager(triton_url="localhost") - - model_manager.add_model(model1) - model_manager.add_model(model2) - - assert _match_length(model_manager.models, 2) is True - - model_manager.clean() - - assert model1.clean.called is True - assert model2.clean.called is True - - assert _match_length(model_manager.models, 0) is True diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_proxy_inference_handler.py b/stf/stf-api-alternative/pytriton/tests/unit/test_proxy_inference_handler.py deleted file mode 100644 index fee81f8d7bc084239a1db024439970c5337841c2..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_proxy_inference_handler.py +++ /dev/null @@ -1,191 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -import dataclasses -import functools -import json -import logging -import time - -import numpy as np -import pytest -import zmq - -from pytriton.exceptions import PyTritonRuntimeError -from pytriton.model_config.triton_model_config import TensorSpec, TritonModelConfig -from pytriton.proxy.communication import InferenceHandlerRequests, MetaRequestResponse, TensorStore -from pytriton.proxy.inference_handler import InferenceHandler, _ResponsesIterator -from pytriton.proxy.types import Request -from pytriton.proxy.validators import TritonResultsValidator -from tests.unit.utils import verify_equalness_of_dicts_with_ndarray - -LOGGER = logging.getLogger("tests.unit.test_proxy_inference_handler") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - - -MODEL_CONFIG = TritonModelConfig( - model_name="Foo", - inputs=[ - TensorSpec(name="input1", dtype=np.int32, shape=(3,)), - TensorSpec(name="input2", dtype=np.int32, shape=(3,)), - ], - outputs=[ - TensorSpec(name="output1", dtype=np.int32, shape=(3,)), - TensorSpec(name="output2", dtype=np.int32, shape=(3,)), - ], -) - -DECOUPLED_MODEL_CONFIG = dataclasses.replace(MODEL_CONFIG, decoupled=True) - - -def _infer_fn(*_, **__): - return [ - {"output1": np.array([1, 2, 3]), "output2": np.array([1, 2, 3])}, - {"output1": np.array([1, 2, 3]), "output2": np.array([1, 2, 3])}, - ] - - -def _infer_gen_fn(*_, **__): - yield [ - {"output1": np.array([1]), "output2": np.array([1])}, - {"output1": np.array([1]), "output2": np.array([1])}, - ] - yield [ - {"output1": np.array([2]), "output2": np.array([2])}, - {"output1": np.array([2]), "output2": np.array([2])}, - ] - yield [ - {"output1": np.array([3]), "output2": np.array([3])}, - {"output1": np.array([3]), "output2": np.array([3])}, - ] - - -def _get_meta_requests_payload(_data_store_socket): - tensor_store = TensorStore(_data_store_socket) - LOGGER.debug(f"Connecting to tensor store {_data_store_socket} ...") - tensor_store.connect() # to already started tensor store - requests = [ - Request({"input1": np.ones((128, 4), dtype="float32"), "input2": np.ones((128, 4), dtype="float32")}), - Request({"input1": np.ones((128, 4), dtype="float32"), "input2": np.ones((128, 4), dtype="float32")}), - ] - input_arrays_with_coords = [ - (request_idx, input_name, tensor) - for request_idx, request in enumerate(requests) - for input_name, tensor in request.items() - ] - LOGGER.debug("Putting tensors to tensor store ...") - tensor_ids = tensor_store.put([tensor for _, _, tensor in input_arrays_with_coords]) - requests_with_ids = [{}] * len(requests) - for (request_idx, input_name, _), tensor_id in zip(input_arrays_with_coords, tensor_ids): - requests_with_ids[request_idx][input_name] = tensor_id - - meta_requests = InferenceHandlerRequests( - requests=[ - MetaRequestResponse(idx, data=request_with_ids, parameters=request.parameters) - for idx, (request, request_with_ids) in enumerate(zip(requests, requests_with_ids)) - ] - ) - LOGGER.debug(f"Return meta requests: {meta_requests}") - return meta_requests.as_bytes() - - -@pytest.mark.parametrize( - "infer_fn,expected_response_lists,decoupled", - [ - (_infer_fn, [_infer_fn()], False), - (_infer_fn, [_infer_fn()], True), # non-generator output should be also handled in decoupled mode - (_infer_gen_fn, _infer_gen_fn(), True), - ], -) -def test_responses_iterator(infer_fn, expected_response_lists, decoupled): - responses = 
list(_ResponsesIterator(infer_fn(), decoupled=decoupled)) - for responses_list, expected_response_list in zip(responses, expected_response_lists): - assert len(responses_list) == len(expected_response_list) - for response, expected_response in zip(responses_list, expected_response_list): - verify_equalness_of_dicts_with_ndarray(response, expected_response) - - -def test_responses_iterator_should_raise_error_when_generator_is_returned_for_nondecoupled_models(): - with pytest.raises(PyTritonRuntimeError, match="Results generator is not supported for non-decoupled models."): - list(_ResponsesIterator(_infer_gen_fn(), decoupled=False)) - - -def test_responses_iterator_could_iterate_only_once_on_non_generator_data(): - # it is usable to ensure that results are not consumed twice - - iterator = _ResponsesIterator(_infer_fn()) - responses1 = list(iterator) - responses2 = list(iterator) - - assert len(responses1) == 1 - assert len(responses2) == 0 - - -@pytest.mark.parametrize( - "triton_model_config,infer_fn", - [ - (MODEL_CONFIG, _infer_fn), - (DECOUPLED_MODEL_CONFIG, _infer_gen_fn), - ], -) -def test_proxy_throws_exception_when_validate_outputs_raise_an_error(tmp_path, mocker, triton_model_config, infer_fn): - zmq_context = None - inference_handler = None - - # simulate tensor store started by proxy backend - data_store_socket = (tmp_path / "data_store.socket").as_posix() - tensor_store = TensorStore(data_store_socket) # authkey will be taken from current process - try: - tensor_store.start() # start tensor store side process - this way InferenceHandler will create client for it - mocker.patch( - "pytriton.proxy.validators.TritonResultsValidator.validate_responses", - side_effect=ValueError("Validate outputs error."), - ) - zmq_context = zmq.Context() - validator = TritonResultsValidator(triton_model_config, strict=False) - inference_handler = InferenceHandler( - infer_fn, - triton_model_config, - shared_memory_socket=f"ipc://{tmp_path}/my", - data_store_socket=data_store_socket, - zmq_context=zmq_context, - validator=validator, - ) - - mock_recv = mocker.patch.object(inference_handler.zmq_context._socket_class, "recv") - mock_recv.side_effect = functools.partial(_get_meta_requests_payload, data_store_socket) - - mocker.patch.object(inference_handler.zmq_context._socket_class, "send") # do not send anything - spy_send = mocker.spy(inference_handler.zmq_context._socket_class, "send") - - inference_handler.start() - - timeout_s = 1.0 - start_s = time.time() - while not spy_send.called and time.time() - start_s < timeout_s: - time.sleep(0.1) - - spy_send.assert_called() - last_call = spy_send.mock_calls[-1] - response_payload = last_call.args[0] - response = json.loads(response_payload) - error = response.get("error") - assert error is not None and "Validate outputs error." in error - finally: - if inference_handler: - inference_handler.stop() - if inference_handler.is_alive(): - inference_handler.join(timeout=1) - tensor_store.close() - if zmq_context: - zmq_context.term() diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_proxy_validators.py b/stf/stf-api-alternative/pytriton/tests/unit/test_proxy_validators.py deleted file mode 100644 index 6e4328d5852783491892d7ab33d9da5859b2d971..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_proxy_validators.py +++ /dev/null @@ -1,259 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging - -import numpy as np -import pytest - -from pytriton.model_config.triton_model_config import TensorSpec, TritonModelConfig -from pytriton.proxy.validators import _validate_output_data, _validate_output_dtype_and_shape, _validate_outputs - -LOGGER = logging.getLogger("tests.unit.test_proxy_validators") -logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s: %(message)s") - -MY_MODEL_CONFIG = TritonModelConfig( - model_name="Foo", - inputs=[ - TensorSpec(name="input1", dtype=np.int32, shape=(3,)), - TensorSpec(name="input2", dtype=np.int32, shape=(3,)), - ], - outputs=[ - TensorSpec(name="output1", dtype=np.int32, shape=(3,)), - TensorSpec(name="output2", dtype=np.int32, shape=(3,)), - ], -) - -MY_MODEL_OUTPUTS = {output.name: output for output in MY_MODEL_CONFIG.outputs} - - -def test_validate_outputs_throws_exception_when_outputs_is_not_a_list(): - outputs = {"output1": np.array([1, 2, 3]), "output2": np.array([1, 2, 3])} - - with pytest.raises( - ValueError, - match=r"Outputs returned by `Foo` model callable must be list of response dicts with numpy arrays", - ): - _validate_outputs( - model_config=MY_MODEL_CONFIG, - model_outputs=MY_MODEL_OUTPUTS, - outputs=outputs, - strict=False, - requests_number=1, - ) - - -def test_validate_outputs_throws_exception_when_outputs_number_is_not_equal_to_requests_number(): - outputs = [{"output1": np.array([1, 2, 3]), "output2": np.array([1, 2, 3])}] - - with pytest.raises( - ValueError, - match=r"Number of outputs returned by `Foo` inference callable " - r"\(1\) does not match number of requests \(2\) received from Triton\.", - ): - _validate_outputs( - model_config=MY_MODEL_CONFIG, - model_outputs=MY_MODEL_OUTPUTS, - outputs=outputs, - strict=False, - requests_number=2, - ) - - -def test_validate_outputs_throws_exception_when_outputs_is_not_a_list_of_dicts(): - outputs = [np.array([1, 2, 3]), np.array([1, 2, 3])] - - with pytest.raises( - ValueError, - match=r"Outputs returned by `Foo` model callable must be list of response dicts with numpy arrays", - ): - _validate_outputs( - model_config=MY_MODEL_CONFIG, - model_outputs=MY_MODEL_OUTPUTS, - outputs=outputs, - strict=False, - requests_number=len(outputs), - ) - - -def test_validate_outputs_call_validate_outputs_data_if_strict_is_false(mocker): - outputs = [{"output1": np.array([1, 2, 3]), "output2": np.array([1, 2, 3])}] - mock_validate_outputs_data = mocker.patch("pytriton.proxy.validators._validate_output_data") - mock_validate_output_dtype_and_shape = mocker.patch("pytriton.proxy.validators._validate_output_dtype_and_shape") - - _validate_outputs( - model_config=MY_MODEL_CONFIG, - model_outputs=MY_MODEL_OUTPUTS, - outputs=outputs, - strict=False, - requests_number=len(outputs), - ) - - assert mock_validate_outputs_data.called is True - assert mock_validate_output_dtype_and_shape.called is False - - -def test_validate_outputs_call_validate_outputs_data_if_strict_is_true(mocker): - outputs = [{"output1": 
np.array([1, 2, 3]), "output2": np.array([1, 2, 3])}] - mock_validate_outputs_data = mocker.patch("pytriton.proxy.validators._validate_output_data") - mock_validate_output_dtype_and_shape = mocker.patch("pytriton.proxy.validators._validate_output_dtype_and_shape") - - _validate_outputs( - model_config=MY_MODEL_CONFIG, - model_outputs=MY_MODEL_OUTPUTS, - outputs=outputs, - strict=True, - requests_number=len(outputs), - ) - - assert mock_validate_outputs_data.called is True - assert mock_validate_output_dtype_and_shape.called is True - - -def test_validate_output_data_throws_exception_when_name_is_not_a_string(): - name = 12 - value = [1.0, 2.0, 3.0] - - with pytest.raises( - ValueError, - match=r"Not all keys returned by `Foo` model callable are string", - ): - _validate_output_data(model_config=MY_MODEL_CONFIG, name=name, value=value) - - -def test_validate_output_data_throws_exception_when_value_is_not_numpy_array(): - name = "output1" - value = [1.0, 2.0, 3.0] - - with pytest.raises( - ValueError, - match=r"Not all values returned by `Foo` model callable are numpy arrays", - ): - _validate_output_data(model_config=MY_MODEL_CONFIG, name=name, value=value) - - -def test_validate_output_data_throws_exception_when_value_is_not_supported_data_type(): - name = "output1" - value = np.array(["2000-01-01T12:00:00.000", "2002-01-01T12:00:00.000"], dtype="datetime64[ms]") - - with pytest.raises( - ValueError, - match=r"Only bool, numeric, string, unicode and object arrays " - r"are supported by Triton \(dtype\.kind: biufOSU\)\. " - "Returned `output1` for model `Foo` " - r"has `M` dtype\.kind\.", - ): - _validate_output_data(model_config=MY_MODEL_CONFIG, name=name, value=value) - - -def test_validate_output_data_throws_exception_when_value_is_list_of_strings(): - name = "output1" - value = np.array(["abcd", "efgg"], dtype=np.object_) - - with pytest.raises( - ValueError, - match=r"Use string/byte-string instead of object for passing string in NumPy array from model `Foo`\.", - ): - _validate_output_data(model_config=MY_MODEL_CONFIG, name=name, value=value) - - -def test_validate_output_data_throws_exception_when_value_is_list_of_ints_defined_as_object(): - name = "output1" - value = np.array([123, 456], dtype=np.object_) - - with pytest.raises( - ValueError, - match=r"Only bytes as objects dtype are supported by PyTriton\. " - "Returned `output1` from `Foo` " - r"has `\` type\.", - ): - _validate_output_data(model_config=MY_MODEL_CONFIG, name=name, value=value) - - -def test_validate_output_dtype_and_shape_throws_exception_when_name_not_in_model_config(): - name = "output3" - value = np.array([[1.0, 2.0]], dtype=np.int32) - - with pytest.raises( - ValueError, - match=r"Returned output `output3` is not defined in model config for model `Foo`\.", - ): - _validate_output_dtype_and_shape( - model_config=MY_MODEL_CONFIG, model_outputs=MY_MODEL_OUTPUTS, name=name, value=value - ) - - -def test_validate_output_dtype_and_shape_throws_exception_when_value_has_incorrect_dtype_and_float_returned(): - name = "output1" - value = np.array([[1.0], [2.0], [3.0]], dtype=float) - - with pytest.raises( - ValueError, - match=r"Returned output `output1` for model `Foo` has invalid type\. Returned: float64 \(f\). 
Expected: \\.", - ): - _validate_output_dtype_and_shape( - model_config=MY_MODEL_CONFIG, model_outputs=MY_MODEL_OUTPUTS, name=name, value=value - ) - - -def test_validate_output_dtype_and_shape_throws_exception_when_value_has_incorrect_dtype_and_bytes_returned(): - name = "output1" - value = np.array([b"test1", b"test2", b"test3"], dtype=np.bytes_) - - with pytest.raises( - ValueError, - match=r"Returned output `output1` for model `Foo` has invalid type\. Returned: \|S5 \(S\). Expected: \\.", - ): - _validate_output_dtype_and_shape( - model_config=MY_MODEL_CONFIG, model_outputs=MY_MODEL_OUTPUTS, name=name, value=value - ) - - -def test_validate_output_dtype_and_shape_throws_exception_when_value_has_incorrect_shape(): - name = "output1" - value = np.array([[[1], [2]], [[3], [4]]], dtype=np.int32) - - with pytest.raises( - ValueError, - match=r"Returned output `output1` for model `Foo` has invalid shapes\. Returned: \(2, 1\)\. Expected: \(3,\)\.", - ): - _validate_output_dtype_and_shape( - model_config=MY_MODEL_CONFIG, model_outputs=MY_MODEL_OUTPUTS, name=name, value=value - ) - - -def test_validate_output_dtype_and_shape_throws_exception_when_value_contains_too_little_items(): - name = "output1" - value = np.array([[1.0, 2.0]], dtype=np.int32) - - with pytest.raises( - ValueError, - match=r"Returned output `output1` for model `Foo` has invalid shapes at one or more positions\. Returned: \(2,\)\. Expected: \(3,\)\.", - ): - _validate_output_dtype_and_shape( - model_config=MY_MODEL_CONFIG, model_outputs=MY_MODEL_OUTPUTS, name=name, value=value - ) - - -def test_validate_output_dtype_and_shape_throws_exception_when_value_contains_too_many_items(): - name = "output2" - value = np.array([[1.0, 2.0, 3.0, 4.0]], dtype=np.int32) - - with pytest.raises( - ValueError, - match=r"Returned output `output2` for model `Foo` has invalid shapes at one or more positions\. Returned: \(4,\)\. Expected: \(3,\)\.", - ): - _validate_output_dtype_and_shape( - model_config=MY_MODEL_CONFIG, model_outputs=MY_MODEL_OUTPUTS, name=name, value=value - ) diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_sync_client.py b/stf/stf-api-alternative/pytriton/tests/unit/test_sync_client.py deleted file mode 100644 index 6b4b0c0bd20f3dd8602af7803b548fa36a6b6c7f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_sync_client.py +++ /dev/null @@ -1,999 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import gc -import logging -import threading -import time - -import numpy as np -import pytest -import tritonclient.grpc -import tritonclient.http - -from pytriton.client import ModelClient -from pytriton.client.exceptions import ( - PyTritonClientClosedError, - PyTritonClientInvalidUrlError, - PyTritonClientModelDoesntSupportBatchingError, - PyTritonClientTimeoutError, - PyTritonClientValueError, -) -from pytriton.client.utils import _DEFAULT_NETWORK_TIMEOUT_S -from pytriton.model_config import DeviceKind -from pytriton.model_config.triton_model_config import TensorSpec, TritonModelConfig - -from .utils import ( - extract_array_from_grpc_infer_input, - extract_array_from_http_infer_input, - patch_grpc_client__model_up_and_ready, - patch_grpc_client__server_up_and_ready, - patch_http_client__model_up_and_ready, - patch_http_client__server_up_and_ready, - verify_equalness_of_dicts_with_ndarray, - wrap_to_grpc_infer_result, - wrap_to_http_infer_result, -) - -logging.basicConfig(level=logging.DEBUG) -LOGGER = logging.getLogger("test_sync_client") - -ADD_SUB_WITH_BATCHING_MODEL_CONFIG = TritonModelConfig( - model_name="AddSub", - model_version=1, - max_batch_size=16, - instance_group={DeviceKind.KIND_CPU: 1}, - inputs=[ - TensorSpec(name="a", shape=(-1, 1), dtype=np.float32), - TensorSpec(name="b", shape=(-1, 1), dtype=np.float32), - ], - outputs=[ - TensorSpec(name="add", shape=(-1, 1), dtype=np.float32), - TensorSpec(name="sub", shape=(-1, 1), dtype=np.float32), - ], - backend_parameters={"shared-memory-socket": "dummy/path"}, -) - -ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG = TritonModelConfig( - model_name="AddSub", - model_version=1, - batching=False, - instance_group={DeviceKind.KIND_CPU: 1}, - inputs=[ - TensorSpec(name="a", shape=(1,), dtype=np.float32), - TensorSpec(name="b", shape=(1,), dtype=np.float32), - ], - outputs=[ - TensorSpec(name="add", shape=(1,), dtype=np.float32), - TensorSpec(name="sub", shape=(1,), dtype=np.float32), - ], - backend_parameters={"shared-memory-socket": "dummy/path"}, -) - -_GRPC_LOCALHOST_URL = "grpc://localhost:8001" -_HTTP_LOCALHOST_URL = "http://localhost:8000" - - -EXPECTED_KWARGS_HTTP_DEFAULT = { - "model_name": ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - "model_version": "", - "request_id": "0", - "parameters": None, - "headers": None, -} # Network timeout is passed to __init__ for client and applied to all network requests for HTTP sync client - -EXPECTED_KWARGS_GRPC_DEFAULT = { - **dict(EXPECTED_KWARGS_HTTP_DEFAULT.items()), - "client_timeout": 60.0, # Network timeout shall be passed always for GRPC sync client -} - - -def test_sync_client_not_raise_error_when_valid_url(): - ModelClient("localhost", "dummy") - ModelClient("localhost:8000", "dummy") - ModelClient("http://localhost", "dummy") - ModelClient("http://localhost:8000", "dummy") - ModelClient("grpc://localhost", "dummy") - ModelClient("grpc://localhost:8001", "dummy") - - -def test_sync_client_init_raises_error_when_invalid_url_provided(): - with pytest.raises(PyTritonClientInvalidUrlError, match="Invalid url"): - ModelClient(["localhost:8001"], "dummy") # pytype: disable=wrong-arg-types - - with pytest.raises(PyTritonClientInvalidUrlError, match="Invalid url"): - ModelClient("https://localhost:8000", "dummy") - - with pytest.raises(PyTritonClientInvalidUrlError, match="Invalid url"): - ModelClient("invalid_scheme://localhost", "dummy") - - with pytest.raises(PyTritonClientInvalidUrlError, match="Invalid url"): - ModelClient("http://localhost:foo", "dummy") - - -def 
test_sync_grpc_client_init_raises_error_when_use_non_lazy_init_on_non_responding_server(): - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for (.*) to be ready timed out."): - ModelClient("dummy:43299", "dummy", lazy_init=False, init_timeout_s=1) - - -def test_sync_grpc_client_init_raises_error_when_requested_unavailable_model_and_non_lazy_init_called(mocker): - from tritonclient.grpc import service_pb2 - - patch_grpc_client__server_up_and_ready(mocker) - mock_get_repo_index = mocker.patch.object(tritonclient.grpc.InferenceServerClient, "get_model_repository_index") - mock_get_repo_index.return_value = service_pb2.RepositoryIndexResponse( - models=[ - service_pb2.RepositoryIndexResponse.ModelIndex(name="OtherName", version="1", state="READY", reason=""), - ] - ) - mocker.patch.object(tritonclient.grpc.InferenceServerClient, "is_model_ready").return_value = False - - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - ModelClient(_GRPC_LOCALHOST_URL, "NotExistentModel", lazy_init=False, init_timeout_s=1.5) - - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - ModelClient(_GRPC_LOCALHOST_URL, "OtherName", "2", lazy_init=False, init_timeout_s=1.5) - - -def test_sync_grpc_client_init_obtain_expected_model_config_when_lazy_init_is_disabled(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - spy_client_init = mocker.spy(tritonclient.grpc.InferenceServerClient, "__init__") - spy_get_model_config = mocker.spy(tritonclient.grpc.InferenceServerClient, "get_model_config") - client = ModelClient("grpc://localhost:8001", ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, lazy_init=False) - - first_call = spy_client_init.mock_calls[0] - - assert first_call.args == (client._general_client, "localhost:8001") - assert first_call.kwargs == {} - - second_call = spy_client_init.mock_calls[1] - - assert second_call.args == (client._infer_client, "localhost:8001") - assert second_call.kwargs == {} - - # assert [(call.args, call.kwargs) for call in spy_client_init.mock_calls] == [ - # ( - # ( - # client._general_client, - # "localhost:8001", - # ), - # {}, - # ), - # ( - # ( - # client._infer_client, - # "localhost:8001", - # ), - # {}, - # ), - # ] - - spy_get_model_config.assert_called_once_with( - ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - "", - as_json=True, - # FIXME: GRPC client get_model_config doesn't support client_timeout parameter - # client_timeout=60.0, - ) - assert client.model_config == ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - -def test_sync_grpc_client_model_config_raises_error_when_requested_unavailable_model(mocker): - from tritonclient.grpc import service_pb2 - - patch_grpc_client__server_up_and_ready(mocker) - mock_get_repo_index = mocker.patch.object(tritonclient.grpc.InferenceServerClient, "get_model_repository_index") - mock_get_repo_index.return_value = service_pb2.RepositoryIndexResponse( - models=[ - service_pb2.RepositoryIndexResponse.ModelIndex(name="OtherName", version="1", state="READY", reason=""), - ] - ) - mocker.patch.object(tritonclient.grpc.InferenceServerClient, "is_model_ready").return_value = False - - with ModelClient(_GRPC_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.model_config - - with ModelClient(_GRPC_LOCALHOST_URL, 
"OtherName", "2", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.model_config - - -def test_sync_grpc_client_infer_raises_error_when_requested_unavailable_model(mocker): - from tritonclient.grpc import service_pb2 - - patch_grpc_client__server_up_and_ready(mocker) - mock_get_repo_index = mocker.patch.object(tritonclient.grpc.InferenceServerClient, "get_model_repository_index") - mock_get_repo_index.return_value = service_pb2.RepositoryIndexResponse( - models=[ - service_pb2.RepositoryIndexResponse.ModelIndex(name="OtherName", version="1", state="READY", reason=""), - ] - ) - mocker.patch.object(tritonclient.grpc.InferenceServerClient, "is_model_ready").return_value = False - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - with ModelClient(_GRPC_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.infer_sample(a, b) - - with ModelClient(_GRPC_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.infer_batch(a, b) - - with ModelClient(_GRPC_LOCALHOST_URL, "OtherName", "2", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.infer_sample(a, b) - - with ModelClient(_GRPC_LOCALHOST_URL, "OtherName", "2", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.infer_batch(a, b) - - -def test_sync_grpc_client_infer_sample_returns_expected_result_when_positional_args_are_used(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_grpc_infer_result(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, "0", server_result) - result = client.infer_sample(a, b) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_GRPC_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - "model_version": "", - "request_id": "0", - "inputs": {"a": a, "b": b}, - "outputs": list(expected_result), - "parameters": None, - "headers": None, - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_grpc_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def 
test_sync_grpc_client_infer_sample_returns_expected_result_when_infer_on_model_with_batching(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - # server will return data with additional axis - server_result = {name: data[np.newaxis, ...] for name, data in expected_result.items()} - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_grpc_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - - inputs_dict = {"a": a, "b": b} - result = client.infer_sample(**inputs_dict) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_GRPC_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, - # expect to send data with additional batch axis - "inputs": {name: data[np.newaxis, ...] for name, data in inputs_dict.items()}, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_grpc_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def test_sync_grpc_client_infer_sample_returns_expected_result_when_named_args_are_used(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = {"add": a + b, "sub": a - b} - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_grpc_infer_result(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, "0", server_result) - - inputs_dict = {"a": a, "b": b} - result = client.infer_sample(**inputs_dict) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_GRPC_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - "inputs": inputs_dict, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_grpc_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def 
test_sync_grpc_client_infer_batch_returns_expected_result_when_positional_args_are_used(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - a = np.array([[1], [1]], dtype=np.float32) - b = np.array([[1], [1]], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_grpc_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - result = client.infer_batch(a, b) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_GRPC_DEFAULT) - expected_kwargs.update( - { - "inputs": {"a": a, "b": b}, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_grpc_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def test_sync_grpc_client_infer_batch_returns_expected_result_when_named_args_are_used(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - a = np.array([[1], [1]], dtype=np.float32) - b = np.array([[1], [1]], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_grpc_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - - inputs_dict = {"a": a, "b": b} - result = client.infer_batch(**inputs_dict) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_GRPC_DEFAULT) - expected_kwargs.update( - { - "inputs": inputs_dict, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_grpc_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def test_sync_grpc_client_infer_batch_raises_error_when_model_doesnt_support_batching(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as 
client: - with pytest.raises(PyTritonClientModelDoesntSupportBatchingError): - client.infer_batch(a=a, b=b) - - -def test_sync_grpc_client_infer_raises_error_when_mixed_args_convention_used(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises( - PyTritonClientValueError, - match="Use either positional either keyword method arguments convention", - ): - client.infer_sample(a, b=b) - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises( - PyTritonClientValueError, - match="Use either positional either keyword method arguments convention", - ): - client.infer_batch(a, b=b) - - -def test_sync_grpc_client_infer_raises_error_when_no_args_provided(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises(PyTritonClientValueError, match="Provide input data"): - client.infer_sample() - - with ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises(PyTritonClientValueError, match="Provide input data"): - client.infer_batch() - - -def test_sync_http_client_init_obtain_expected_model_config_when_lazy_init_is_disabled(mocker): - from pytriton.client.client import DEFAULT_INFERENCE_TIMEOUT_S - - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - spy_client_init = mocker.spy(tritonclient.http.InferenceServerClient, "__init__") - client = ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name, lazy_init=False) - - first_call = spy_client_init.mock_calls[0] - - assert first_call.args == (client._general_client, "localhost:8000") - assert first_call.kwargs == { - "connection_timeout": _DEFAULT_NETWORK_TIMEOUT_S, - "network_timeout": _DEFAULT_NETWORK_TIMEOUT_S, - } - - second_call = spy_client_init.mock_calls[1] - - assert second_call.args == (client._infer_client, "localhost:8000") - assert second_call.kwargs == { - "connection_timeout": DEFAULT_INFERENCE_TIMEOUT_S, - "network_timeout": DEFAULT_INFERENCE_TIMEOUT_S, - } - - # assert [(call.args, call.kwargs) for call in spy_client_init.mock_calls] == [ - # ( - # (client._general_client, "localhost:8000"), - # {"connection_timeout": _DEFAULT_NETWORK_TIMEOUT_S, "network_timeout": _DEFAULT_NETWORK_TIMEOUT_S}, - # ), - # ( - # (client._infer_client, "localhost:8000"), - # {"connection_timeout": DEFAULT_INFERENCE_TIMEOUT_S, "network_timeout": DEFAULT_INFERENCE_TIMEOUT_S}, - # ), - # ] - assert client.model_config == ADD_SUB_WITH_BATCHING_MODEL_CONFIG - - -def test_sync_http_client_init_raises_error_when_use_non_lazy_init(): - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for (.*) to be ready timed out."): - ModelClient("http://dummy:43299", "dummy", lazy_init=False, init_timeout_s=1) - - -def test_sync_http_client_init_raises_error_when_requested_unavailable_model_and_non_lazy_init_called(mocker): - patch_http_client__server_up_and_ready(mocker) - mock_get_repo_index = mocker.patch.object(tritonclient.http.InferenceServerClient, 
"get_model_repository_index") - mock_get_repo_index.return_value = [{"name": "OtherName", "version": "1", "state": "READY", "reason": ""}] - mocker.patch.object(tritonclient.http.InferenceServerClient, "is_model_ready").return_value = False - - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - ModelClient(_HTTP_LOCALHOST_URL, "NotExistentModel", lazy_init=False, init_timeout_s=1.5) - - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - ModelClient(_HTTP_LOCALHOST_URL, "OtherName", "2", lazy_init=False, init_timeout_s=1.5) - - -def test_sync_http_client_model_config_raises_error_when_requested_unavailable_model(mocker): - patch_http_client__server_up_and_ready(mocker) - mock_get_repo_index = mocker.patch.object(tritonclient.http.InferenceServerClient, "get_model_repository_index") - mock_get_repo_index.return_value = [{"name": "OtherName", "version": "1", "state": "READY", "reason": ""}] - mocker.patch.object(tritonclient.http.InferenceServerClient, "is_model_ready").return_value = False - - with ModelClient(_HTTP_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.model_config - - with ModelClient(_HTTP_LOCALHOST_URL, "OtherName", "2", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.model_config - - -def test_sync_http_client_infer_raises_error_when_requested_unavailable_model(mocker): - patch_http_client__server_up_and_ready(mocker) - mock_get_repo_index = mocker.patch.object(tritonclient.http.InferenceServerClient, "get_model_repository_index") - mock_get_repo_index.return_value = [{"name": "OtherName", "version": "1", "state": "READY", "reason": ""}] - mocker.patch.object(tritonclient.http.InferenceServerClient, "is_model_ready").return_value = False - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - with ModelClient(_HTTP_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.infer_sample(a, b) - - with ModelClient(_HTTP_LOCALHOST_URL, "NonExistentModel", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.infer_batch(a, b) - - with ModelClient(_HTTP_LOCALHOST_URL, "OtherName", "2", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.infer_sample(a, b) - - with ModelClient(_HTTP_LOCALHOST_URL, "OtherName", "2", init_timeout_s=1.5) as client: - with pytest.raises(PyTritonClientTimeoutError, match="Waiting for model (.*) to be ready timed out."): - _ = client.infer_batch(a, b) - - -def test_sync_http_client_infer_sample_returns_expected_result_when_infer_on_model_with_batching(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - # server will return data with additional axis - server_result = {name: data[np.newaxis, ...] 
for name, data in expected_result.items()} - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - result = client.infer_sample(a, b) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_HTTP_DEFAULT) - expected_kwargs.update( - { - # expect to send data with additional batch axis - "inputs": {"a": a[np.newaxis, ...], "b": b[np.newaxis, ...]}, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def test_sync_http_client_infer_sample_returns_expected_result_when_positional_args_are_used(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, "0", server_result) - result = client.infer_sample(a, b) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_HTTP_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - "inputs": {"a": a, "b": b}, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -@pytest.fixture(params=["after_infer", "no_infer"]) -def infer_state(request): - return request.param - - -def test_sync_http_client_infer_sample_from_existing_client(mocker, infer_state): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, 
"infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, "0", server_result) - - if infer_state == "after_infer": - client.infer_sample(a, b) - # After client is created, there should be no call to get_model_config - spy_get_model_config = mocker.spy(tritonclient.http.InferenceServerClient, "get_model_config") - spy_is_server_ready = mocker.spy(tritonclient.http.InferenceServerClient, "is_server_ready") - spy_is_server_live = mocker.spy(tritonclient.http.InferenceServerClient, "is_server_live") - with ModelClient.from_existing_client(client) as client_from_existing: - mock_infer_from_existing = mocker.patch.object(client_from_existing._infer_client, "infer") - mock_infer_from_existing.return_value = mock_infer.return_value - result_from_existing = client_from_existing.infer_sample(a, b) - - if infer_state == "after_infer": - spy_get_model_config.not_called() - spy_is_server_ready.not_called() - spy_is_server_live.not_called() - else: - assert len(spy_get_model_config.mock_calls) == 2 - assert len(spy_is_server_ready.mock_calls) == 3 - assert len(spy_is_server_live.mock_calls) == 3 - - called_kwargs = mock_infer_from_existing.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_HTTP_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - "inputs": {"a": a, "b": b}, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result_from_existing) - - -@pytest.fixture(params=["ensure_model_is_ready=True", "ensure_model_is_ready=False"]) -def ensure_model_is_ready(request): - return request.param - - -def test_sync_http_client_infer_batch_init_from_client(mocker, ensure_model_is_ready): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - ensure_model_is_ready = ensure_model_is_ready == "ensure_model_is_ready=True" - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - # After client is created, there should be no call to get_model_config - spy_get_model_config = mocker.spy(tritonclient.http.InferenceServerClient, "get_model_config") - spy_is_server_ready = mocker.spy(tritonclient.http.InferenceServerClient, "is_server_ready") - spy_is_server_live = mocker.spy(tritonclient.http.InferenceServerClient, "is_server_live") - with ModelClient( - url=_HTTP_LOCALHOST_URL, - model_name=ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - model_config=ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, - ensure_model_is_ready=ensure_model_is_ready, - ) as client_from_existing: - mock_infer_from_existing = mocker.patch.object(client_from_existing._infer_client, "infer") - mock_infer_from_existing.return_value = wrap_to_http_infer_result( - ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, "0", server_result - ) - result_from_existing = 
client_from_existing.infer_batch(a, b) - - if ensure_model_is_ready: - spy_get_model_config.not_called() - assert len(spy_is_server_ready.mock_calls) == 2 - assert len(spy_is_server_live.mock_calls) == 2 - else: - spy_get_model_config.not_called() - spy_is_server_ready.not_called() - spy_is_server_live.not_called() - - called_kwargs = mock_infer_from_existing.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_HTTP_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - "inputs": {"a": a, "b": b}, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result_from_existing) - - -def test_sync_http_client_infer_sample_returns_expected_result_when_named_args_are_used(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = {"add": a + b, "sub": a - b} - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG, "0", server_result) - - inputs_dict = {"a": a, "b": b} - result = client.infer_sample(**inputs_dict) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_HTTP_DEFAULT) - expected_kwargs.update( - { - "model_name": ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name, - "inputs": inputs_dict, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def test_sync_http_client_infer_batch_returns_expected_result_when_positional_args_are_used(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - a = np.array([[1], [1]], dtype=np.float32) - b = np.array([[1], [1]], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = 
wrap_to_http_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - result = client.infer_batch(a, b) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_HTTP_DEFAULT) - expected_kwargs.update( - { - "inputs": {"a": a, "b": b}, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def test_sync_http_client_infer_batch_returns_expected_result_when_named_args_are_used(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - a = np.array([[1], [1]], dtype=np.float32) - b = np.array([[1], [1]], dtype=np.float32) - expected_result = {"add": a + b, "sub": a - b} - server_result = expected_result - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - mock_infer = mocker.patch.object(client._infer_client, "infer") - mock_infer.return_value = wrap_to_http_infer_result(ADD_SUB_WITH_BATCHING_MODEL_CONFIG, "0", server_result) - - inputs_dict = {"a": a, "b": b} - result = client.infer_batch(**inputs_dict) - - called_kwargs = mock_infer.call_args.kwargs - expected_kwargs = dict(EXPECTED_KWARGS_HTTP_DEFAULT) - expected_kwargs.update( - { - "inputs": inputs_dict, - "outputs": list(expected_result), - } - ) - for arg_name, arg_value in expected_kwargs.items(): - if arg_name not in ["inputs", "outputs"]: # inputs and outputs requires manual verification - assert called_kwargs.get(arg_name) == arg_value - for key in called_kwargs: - assert key in expected_kwargs - assert [output.name() for output in called_kwargs.get("outputs")] == list(expected_kwargs["outputs"]) - inputs_called_arg = {i.name(): extract_array_from_http_infer_input(i) for i in called_kwargs.get("inputs")} - verify_equalness_of_dicts_with_ndarray(inputs_called_arg, expected_kwargs["inputs"]) - - verify_equalness_of_dicts_with_ndarray(expected_result, result) - - -def test_sync_http_client_infer_batch_raises_error_when_model_doesnt_support_batching(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises(PyTritonClientModelDoesntSupportBatchingError): - client.infer_batch(a, b) - - -def test_sync_http_client_infer_raises_error_when_mixed_args_convention_used(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - a = np.array([1], dtype=np.float32) - b = np.array([1], dtype=np.float32) - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises( - PyTritonClientValueError, - match="Use either positional either 
keyword method arguments convention", - ): - client.infer_sample(a, b=b) - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises( - PyTritonClientValueError, - match="Use either positional either keyword method arguments convention", - ): - client.infer_batch(a, b=b) - - -def test_sync_http_client_infer_raises_error_when_no_args_provided(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITHOUT_BATCHING_MODEL_CONFIG) - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises(PyTritonClientValueError, match="Provide input data"): - client.infer_sample() - - with ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) as client: - with pytest.raises(PyTritonClientValueError, match="Provide input data"): - client.infer_batch() - - -@pytest.mark.filterwarnings("error::pytest.PytestUnraisableExceptionWarning") -def test_del_of_http_client_does_not_raise_error(): - def _del(client): - del client._general_client - del client._infer_client - - def _create_client_and_delete(): - client = ModelClient(_HTTP_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) - client.close() - threading.Thread(target=_del, args=(client,)).start() - - _create_client_and_delete() - time.sleep(0.1) - gc.collect() - - -@pytest.mark.filterwarnings("error::pytest.PytestUnraisableExceptionWarning") -def test_del_of_grpc_client_does_not_raise_error(): - def _del(client): - del client._general_client - del client._infer_client - - def _create_client_and_delete(): - client = ModelClient(_GRPC_LOCALHOST_URL, ADD_SUB_WITH_BATCHING_MODEL_CONFIG.model_name) - client.close() - threading.Thread(target=_del, args=(client,)).start() - - _create_client_and_delete() - time.sleep(0.1) - gc.collect() - - -@pytest.mark.timeout(1.0) -def test_init_http_passes_timeout(): - with ModelClient("http://localhost:6669", "dummy", init_timeout_s=0.2, inference_timeout_s=0.1) as client: - with pytest.raises(PyTritonClientTimeoutError): - client.wait_for_model(timeout_s=0.2) - - -@pytest.mark.timeout(5) -def test_init_grpc_passes_timeout_5(): - with ModelClient("grpc://localhost:6669", "dummy", init_timeout_s=0.2, inference_timeout_s=0.1) as client: - with pytest.raises(PyTritonClientTimeoutError): - client.wait_for_model(timeout_s=0.2) - - -def test_http_client_raises_error_when_used_after_close(mocker): - patch_http_client__server_up_and_ready(mocker) - patch_http_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - with ModelClient(_HTTP_LOCALHOST_URL, "dummy") as client: - pass - - with pytest.raises(PyTritonClientClosedError): - client.wait_for_model(timeout_s=0.2) - - a = np.array([1], dtype=np.float32) - with pytest.raises(PyTritonClientClosedError): - client.infer_sample(a=a) - - with pytest.raises(PyTritonClientClosedError): - client.infer_batch(a=[a]) - - -def test_grpc_client_raises_error_when_used_after_close(mocker): - patch_grpc_client__server_up_and_ready(mocker) - patch_grpc_client__model_up_and_ready(mocker, ADD_SUB_WITH_BATCHING_MODEL_CONFIG) - - with ModelClient(_GRPC_LOCALHOST_URL, "dummy") as client: - pass - - with pytest.raises(PyTritonClientClosedError): - client.wait_for_model(timeout_s=0.2) - - a = np.array([1], dtype=np.float32) - with pytest.raises(PyTritonClientClosedError): - client.infer_sample(a=a) - - with pytest.raises(PyTritonClientClosedError): - client.infer_batch(a=[a]) 
diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_tensor.py b/stf/stf-api-alternative/pytriton/tests/unit/test_tensor.py deleted file mode 100644 index 93d914bd12ed94dddcf908c04101088795fd5d98..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_tensor.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import dataclasses - -import numpy as np -import pytest - -from pytriton.model_config import Tensor - - -def test_tensor_set_correct_dtype_when_np_dtype_passed(): - tensor = Tensor(name="variable", dtype=np.float32().dtype, shape=(2, 1)) - - assert tensor.dtype == np.float32().dtype.type - - -def test_tensor_raise_error_when_mutate_field(): - tensor = Tensor(name="variable", dtype=np.float32, shape=(2, 1)) - - with pytest.raises(dataclasses.FrozenInstanceError): - tensor.dtype = np.int32().dtype diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_triton.py b/stf/stf-api-alternative/pytriton/tests/unit/test_triton.py deleted file mode 100644 index 8bdb3e6a0631329a68f938b6c8852991ae496002..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_triton.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import pathlib -import re -from unittest.mock import PropertyMock - -import pytest - -from pytriton.exceptions import PyTritonValidationError -from pytriton.triton import ( - GROWTH_BACKEND_SHM_SIZE, - INITIAL_BACKEND_SHM_SIZE, - TRITONSERVER_DIST_DIR, - Triton, - TritonConfig, -) - -EXPECTED_BACKEND_ARGS = [ - "python,shm-region-prefix-name=pytrtion[0-9]+", - f"python,shm-default-byte-size={INITIAL_BACKEND_SHM_SIZE}", - f"python,shm-growth-byte-size={GROWTH_BACKEND_SHM_SIZE}", -] - - -def test_triton_is_alive_return_false_when_not_initialized(): - triton = Triton() - - assert triton._triton_server is None - assert triton.is_alive() is False - - -def test_triton_server_initialize_server_with_default_arguments(mocker): - triton = Triton() - triton._prepare_triton_inference_server() - - assert triton._triton_server_config["model_repository"] is not None - assert triton._triton_server_config["backend_directory"] is not None - assert len(triton._triton_server_config["backend_config"]) == 3 - for idx in range(len(EXPECTED_BACKEND_ARGS)): - assert re.match(EXPECTED_BACKEND_ARGS[idx], triton._triton_server_config["backend_config"][idx]) - - -def test_triton_server_initialize_server_with_custom_arguments(mocker): - config = TritonConfig(id="CustomId", model_repository=pathlib.Path("/tmp"), allow_metrics=False) - triton = Triton(config=config) - triton._prepare_triton_inference_server() - - assert triton._triton_server_config["id"] == "CustomId" - assert triton._triton_server_config["model_repository"] == "/tmp" - assert triton._triton_server_config["allow_metrics"] is False - assert triton._triton_server_config["backend_directory"] is not None - for idx in range(len(EXPECTED_BACKEND_ARGS)): - assert re.match(EXPECTED_BACKEND_ARGS[idx], triton._triton_server_config["backend_config"][idx]) - - -def test_triton_server_initialize_server_with_custom_arguments_and_env_variables(mocker): - import os - - updated_environ = { - **os.environ, - "PYTRITON_TRITON_CONFIG_GRPC_PORT": "8080", - "PYTRITON_TRITON_CONFIG_MODEL_REPOSITORY": "/opt", - } - mocker.patch("os.environ", new_callable=PropertyMock(return_value=updated_environ)) - - config = TritonConfig(id="CustomId", model_repository=pathlib.Path("/tmp"), allow_metrics=False) - triton = Triton(config=config) - triton._prepare_triton_inference_server() - - assert triton._triton_server_config["id"] == "CustomId" - assert triton._triton_server_config["model_repository"] == "/tmp" - assert triton._triton_server_config["grpc_port"] == 8080 - assert triton._triton_server_config["allow_metrics"] is False - assert triton._triton_server_config["backend_directory"] is not None - assert triton._triton_server_config["backend_config"] is not None - for idx in range(len(EXPECTED_BACKEND_ARGS)): - assert re.match(EXPECTED_BACKEND_ARGS[idx], triton._triton_server_config["backend_config"][idx]) - - -def test_triton_bind_model_name_verification(mocker): - mocker.patch.object(Triton, "_prepare_triton_inference_server").return_value = TRITONSERVER_DIST_DIR - - triton = Triton() - triton.bind("AB-cd_90.1", lambda: None, [], []) - - with pytest.raises( - PyTritonValidationError, - match="Model name can only contain alphanumeric characters, dots, underscores and dashes", - ): - triton.bind("AB#cd/90/1", lambda: None, [], []) - - with pytest.raises(PyTritonValidationError, match="Model name cannot be empty"): - triton.bind("", lambda: None, [], []) diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_triton_config.py 
b/stf/stf-api-alternative/pytriton/tests/unit/test_triton_config.py deleted file mode 100644 index c31bd23689307e4b81e3eb4a23c217dc3618936f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_triton_config.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import pytest - -from pytriton.server.python_backend_config import PythonBackendConfig -from pytriton.server.triton_server_config import TritonServerConfig -from pytriton.triton import INITIAL_BACKEND_SHM_SIZE, TritonConfig - - -def test_triton_config_raise_with_positional_args(): - with pytest.raises(TypeError, match="TritonConfig initialization can't be used with positional arguments"): - TritonConfig("CustomId", "/tmp", False) - - -def test_triton_config_serialization_handles_lists(): - config = TritonConfig(cache_config=["local,size=1048576", "redis,size=10485760"]) - - triton_server_config = TritonServerConfig() - for name, value in config.to_dict().items(): - if name not in TritonServerConfig.allowed_keys() or value is None: - continue - - triton_server_config[name] = value - - cli = triton_server_config.to_cli_string() - assert "--cache-config=local,size=1048576" in cli - assert "--cache-config=redis,size=1048576" in cli - - -def test_triton_config_serialize_backend_configuration(): - config = PythonBackendConfig() - config["shm_default_byte_size"] = INITIAL_BACKEND_SHM_SIZE - cli_backend = config.to_list_args() - assert [f"python,shm-default-byte-size={INITIAL_BACKEND_SHM_SIZE}"] == cli_backend - - triton_server_config = TritonServerConfig() - triton_server_config["backend-config"] = cli_backend - cli = triton_server_config.to_cli_string() - - assert f"--backend-config=python,shm-default-byte-size={INITIAL_BACKEND_SHM_SIZE}" == cli diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_triton_server.py b/stf/stf-api-alternative/pytriton/tests/unit/test_triton_server.py deleted file mode 100644 index c9dcdad259061cc20d7ab89c505f1c71071765ab..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_triton_server.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from pytriton.server.triton_server import TritonServer -from pytriton.server.triton_server_config import TritonServerConfig -from pytriton.triton import TRITONSERVER_DIST_DIR -from pytriton.utils.distribution import get_libs_path - - -def test_triton_endpoints(): - config = TritonServerConfig() - config.update_config({"grpc_port": 8011, "http_port": 8010, "model-repository": "/tmp/repo"}) - triton_server = TritonServer( - path=TRITONSERVER_DIST_DIR / "bin/tritonserver", - libs_path=get_libs_path(), - config=config, - ) - assert triton_server.get_endpoint("grpc") == "grpc://127.0.0.1:8011" - assert triton_server.get_endpoint("http") == "http://127.0.0.1:8010" - assert triton_server.get_endpoint("metrics") == "http://127.0.0.1:8002" - - config = TritonServerConfig() - config.update_config({"grpc_address": "192.168.0.1", "model-repository": "/tmp/repo"}) - triton_server = triton_server = TritonServer( - path=TRITONSERVER_DIST_DIR / "bin/tritonserver", - libs_path=get_libs_path(), - config=config, - ) - assert triton_server.get_endpoint("grpc") == "grpc://192.168.0.1:8001" - assert triton_server.get_endpoint("http") == "http://127.0.0.1:8000" - assert triton_server.get_endpoint("metrics") == "http://127.0.0.1:8002" - - config = TritonServerConfig() - config.update_config({"http_address": "192.168.0.1", "model-repository": "/tmp/repo"}) - triton_server = triton_server = TritonServer( - path=TRITONSERVER_DIST_DIR / "bin/tritonserver", - libs_path=get_libs_path(), - config=config, - ) - assert triton_server.get_endpoint("grpc") == "grpc://127.0.0.1:8001" - assert triton_server.get_endpoint("http") == "http://192.168.0.1:8000" - assert triton_server.get_endpoint("metrics") == "http://192.168.0.1:8002" - - config = TritonServerConfig() - config.update_config({"http_address": "0.0.0.0", "model-repository": "/tmp/repo"}) - triton_server = triton_server = TritonServer( - path=TRITONSERVER_DIST_DIR / "bin/tritonserver", libs_path=get_libs_path(), config=config - ) - assert triton_server.get_endpoint("grpc") == "grpc://127.0.0.1:8001" - assert triton_server.get_endpoint("http") == "http://127.0.0.1:8000" - assert triton_server.get_endpoint("metrics") == "http://127.0.0.1:8002" - - config = TritonServerConfig() - config.update_config({"http_address": "0.00.00.000", "model-repository": "/tmp/repo"}) - triton_server = triton_server = TritonServer( - path=TRITONSERVER_DIST_DIR / "bin/tritonserver", libs_path=get_libs_path(), config=config - ) - assert triton_server.get_endpoint("grpc") == "grpc://127.0.0.1:8001" - assert triton_server.get_endpoint("http") == "http://127.0.0.1:8000" - assert triton_server.get_endpoint("metrics") == "http://127.0.0.1:8002" diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_utils_dataclasses.py b/stf/stf-api-alternative/pytriton/tests/unit/test_utils_dataclasses.py deleted file mode 100644 index c3289f231a49a3aead64db8b62ecdab2c0184731..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_utils_dataclasses.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import dataclasses - -import pytest - -from pytriton.utils.dataclasses import kwonly_dataclass - - -def test_kwonly_dataclasses(): - @kwonly_dataclass - @dataclasses.dataclass - class A: - a: int - b: int = 0 - c: str = "hello" - - a = A(a=1, b=2, c="world") - with pytest.raises(TypeError): - a = A(1, 2, "world") - - assert isinstance(a, A) - assert dataclasses.is_dataclass(a) diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_utils_distribution.py b/stf/stf-api-alternative/pytriton/tests/unit/test_utils_distribution.py deleted file mode 100644 index 5794a01192f4b41d4229c5fc7140237bf033fbfa..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_utils_distribution.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import pathlib -import site - -from pytriton.utils.distribution import get_libs_path, get_root_module_path, is_editable_install - - -def test_is_editable_install(mocker): - """Test if pytriton is installed in editable mode.""" - mocker.patch("pytriton.utils.distribution.get_root_module_path", return_value=pathlib.Path("/home/user/pytriton")) - assert is_editable_install() - - mocker.patch( - "pytriton.utils.distribution.get_root_module_path", - return_value=pathlib.Path(f"{site.getsitepackages()[0]}/pytriton"), - ) - assert not is_editable_install() - - mocker.patch( - "pytriton.utils.distribution.get_root_module_path", - return_value=pathlib.Path(f"{site.getusersitepackages()}/pytriton"), - ) - assert not is_editable_install() - - -def test_get_root_module_path(mocker): - """Test obtaining path to pytriton module.""" - expected_value = pathlib.Path("/home/user/pytriton") - - # mock obtaining path of pytriton/utils/distribution.py file - mocker.patch( - "pytriton.utils.distribution.pathlib.Path", - return_value=pathlib.Path("/home/user/pytriton/utils/distribution.py"), - ) - assert get_root_module_path() == expected_value - - -def test_get_libs_path(mocker): - """Test obtaining path to directory with external libraries required by Triton.""" - mocker.patch("pytriton.utils.distribution.get_root_module_path", return_value=pathlib.Path("/home/user/pytriton")) - assert get_libs_path() == pathlib.Path("/home/user/pytriton/tritonserver/external_libs") - - mocker.patch( - "pytriton.utils.distribution.get_root_module_path", - return_value=pathlib.Path(f"{site.getsitepackages()[0]}/pytriton"), - ) - assert get_libs_path() == pathlib.Path(f"{site.getsitepackages()[0]}/nvidia_pytriton.libs") - - mocker.patch( - "pytriton.utils.distribution.get_root_module_path", - return_value=pathlib.Path(f"{site.getusersitepackages()}/pytriton"), - ) - assert get_libs_path() == pathlib.Path(f"{site.getusersitepackages()}/nvidia_pytriton.libs") diff --git a/stf/stf-api-alternative/pytriton/tests/unit/test_workspace.py b/stf/stf-api-alternative/pytriton/tests/unit/test_workspace.py deleted file mode 100644 index e8e029ea53a46ee4e2b2cbc7efba932271a06e74..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/test_workspace.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import multiprocessing -import os.path -import pathlib -import tempfile - -import pytest - - -def test_workspace_exist_and_empty_when_created(): - from pytriton.utils.workspace import Workspace - - with tempfile.TemporaryDirectory() as tempdir: - tempdir = pathlib.Path(tempdir) - workspace = Workspace(tempdir / "workspace") - - assert workspace._workspace_path.exists() - assert workspace.exists() - assert workspace.is_empty() - assert len(os.listdir(workspace._workspace_path)) == 0 - assert workspace.path == workspace._workspace_path - - -def test_workspace_not_exist_and_empty_when_removed(): - from pytriton.utils.workspace import Workspace - - with tempfile.TemporaryDirectory() as tempdir: - tempdir = pathlib.Path(tempdir) - workspace = Workspace(tempdir / "workspace") - - os.rmdir(workspace._workspace_path) - assert not workspace.exists() - assert workspace.is_empty() - - -def test_workspace_initializer_raises_error_when_workspace_directory_already_exists(): - from pytriton.utils.workspace import Workspace - - with tempfile.TemporaryDirectory() as tempdir: - tempdir = pathlib.Path(tempdir) - workspace = Workspace(tempdir / "workspace") - - # Exception should be raised at second creation in the same path - with pytest.raises(FileExistsError): - _ = Workspace(workspace.path) - - -def test_workspace_clean(): - from pytriton.utils.workspace import Workspace - - with tempfile.TemporaryDirectory() as tempdir: - tempdir = pathlib.Path(tempdir) - workspace = Workspace(tempdir / "workspace") - - open(workspace.path / "file.txt", "w").close() - (workspace.path / "dir").mkdir() - assert not workspace.is_empty() - - workspace.clean() - assert workspace.is_empty() - assert not workspace.exists() - - # No exception should be raised at second clean - workspace.clean() - assert not workspace.exists() - - -def test_tmp_workspace_exist_when_created(): - import pytriton.constants as constants - from pytriton.utils.workspace import Workspace - - with tempfile.TemporaryDirectory() as tempdir: - constants.PYTRITON_HOME = pathlib.Path(tempdir) - workspace = Workspace() - assert workspace.exists() - - -def test_tmp_workspace_not_exist_when_deleted(): - import pytriton.constants as constants - from pytriton.utils.workspace import Workspace - - with tempfile.TemporaryDirectory() as tempdir: - constants.PYTRITON_HOME = pathlib.Path(tempdir) - workspace = Workspace() - assert workspace.exists() - p = workspace.path - del workspace - assert not p.exists() - - -def test_tmp_workspace_not_exist_when_cleaned(): - import pytriton.constants as constants - from pytriton.utils.workspace import Workspace - - with tempfile.TemporaryDirectory() as tempdir: - constants.PYTRITON_HOME = pathlib.Path(tempdir) - workspace = Workspace() - assert workspace.exists() - workspace.clean() - assert not workspace.exists() - - -def _check(pytriton_home=None): - if pytriton_home is not None: - os.environ["PYTRITON_HOME"] = pytriton_home - else: - pytriton_home = pathlib.Path.home() / ".cache" / "pytriton" - - from pytriton.constants import PYTRITON_HOME - - assert PYTRITON_HOME == pathlib.Path(pytriton_home), f"{PYTRITON_HOME} != {pytriton_home}" - - from pytriton.utils.workspace import Workspace - - workspace = Workspace() - # below line will raise ValueError if workspace is not in pytriton_home - workspace.path.relative_to(pytriton_home) - - -def test_use_env_variable_to_set_pytriton_home(): - ctx = multiprocessing.get_context("spawn") - - process = ctx.Process(target=_check) # no pytriton_home environment - process.start() - 
process.join() - assert process.exitcode == 0 - - with tempfile.TemporaryDirectory() as tempdir: - process = ctx.Process(target=_check, args=(tempdir,)) - process.start() - process.join() - assert process.exitcode == 0 diff --git a/stf/stf-api-alternative/pytriton/tests/unit/utils.py b/stf/stf-api-alternative/pytriton/tests/unit/utils.py deleted file mode 100644 index 0e4dc7e54145c52c2302a30786b82c5b66007205..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/unit/utils.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import json -import typing - -import numpy as np -import tritonclient.grpc -import tritonclient.http -import tritonclient.utils - -from pytriton.model_config.generator import ModelConfigGenerator -from pytriton.model_config.triton_model_config import TritonModelConfig - - -def verify_equalness_of_dicts_with_ndarray(a_dict, b_dict): - assert a_dict.keys() == b_dict.keys(), f"{a_dict} != {b_dict}" - for output_name in a_dict: - assert isinstance( - a_dict[output_name], type(b_dict[output_name]) - ), f"type(a[{output_name}])={type(a_dict[output_name])} != type(b[{output_name}])={type(b_dict[output_name])}" - if isinstance(a_dict[output_name], np.ndarray): - assert a_dict[output_name].dtype == b_dict[output_name].dtype - assert a_dict[output_name].shape == b_dict[output_name].shape - if np.issubdtype(a_dict[output_name].dtype, np.number): - assert np.allclose(b_dict[output_name], a_dict[output_name]) - else: - assert np.array_equal(b_dict[output_name], a_dict[output_name]) - else: - assert a_dict[output_name] == b_dict[output_name] - - -def wrap_to_grpc_infer_result( - model_config: TritonModelConfig, request_id: str, outputs_dict: typing.Dict[str, np.ndarray] -): - raw_output_contents = [output_data.tobytes() for output_data in outputs_dict.values()] - return tritonclient.grpc.InferResult( - tritonclient.grpc.service_pb2.ModelInferResponse( - model_name=model_config.model_name, - model_version=str(model_config.model_version), - id=request_id, - outputs=[ - tritonclient.grpc.service_pb2.ModelInferResponse.InferOutputTensor( - name=output_name, - datatype=tritonclient.utils.np_to_triton_dtype(output_data.dtype), - shape=output_data.shape, - ) - for output_name, output_data in outputs_dict.items() - ], - raw_output_contents=raw_output_contents, - ) - ) - - -def wrap_to_http_infer_result( - model_config: TritonModelConfig, request_id: str, outputs_dict: typing.Dict[str, np.ndarray] -): - raw_output_contents = [output_data.tobytes() for output_data in outputs_dict.values()] - buffer = b"".join(raw_output_contents) - - content = { - "outputs": [ - { - "name": name, - "datatype": tritonclient.utils.np_to_triton_dtype(output_data.dtype), - "shape": list(output_data.shape), - "parameters": {"binary_data_size": len(output_data.tobytes())}, - } - for name, output_data in outputs_dict.items() - ] - } - header = json.dumps(content).encode("utf-8") - 
response_body = header + buffer - - return tritonclient.http.InferResult.from_response_body(response_body, False, len(header)) - - -def extract_array_from_grpc_infer_input(input_: tritonclient.grpc.InferInput): - np_array = np.frombuffer(input_._raw_content, dtype=tritonclient.utils.triton_to_np_dtype(input_.datatype())) - np_array = np_array.reshape(input_.shape()) - return np_array - - -def extract_array_from_http_infer_input(input_: tritonclient.http.InferInput): - np_array = np.frombuffer(input_._raw_data, dtype=tritonclient.utils.triton_to_np_dtype(input_.datatype())) - np_array = np_array.reshape(input_.shape()) - return np_array - - -def patch_grpc_client__server_up_and_ready(mocker): - mocker.patch.object(tritonclient.grpc.InferenceServerClient, "is_server_ready").return_value = True - mocker.patch.object(tritonclient.grpc.InferenceServerClient, "is_server_live").return_value = True - - -def patch_http_client__server_up_and_ready(mocker): - mocker.patch.object(tritonclient.http.InferenceServerClient, "is_server_ready").return_value = True - mocker.patch.object(tritonclient.http.InferenceServerClient, "is_server_live").return_value = True - - -def patch_grpc_client__model_up_and_ready(mocker, model_config: TritonModelConfig): - from google.protobuf import json_format # pytype: disable=pyi-error - from tritonclient.grpc import model_config_pb2, service_pb2 # pytype: disable=pyi-error - - mock_get_repo_index = mocker.patch.object(tritonclient.grpc.InferenceServerClient, "get_model_repository_index") - mock_get_repo_index.return_value = service_pb2.RepositoryIndexResponse( - models=[ - service_pb2.RepositoryIndexResponse.ModelIndex( - name=model_config.model_name, version="1", state="READY", reason="" - ), - ] - ) - - mocker.patch.object(tritonclient.grpc.InferenceServerClient, "is_model_ready").return_value = True - - model_config_dict = ModelConfigGenerator(model_config).get_config() - model_config_protobuf = json_format.ParseDict(model_config_dict, model_config_pb2.ModelConfig()) - response = service_pb2.ModelConfigResponse(config=model_config_protobuf) - response_dict = json.loads(json_format.MessageToJson(response, preserving_proto_field_name=True)) - mock_get_model_config = mocker.patch.object(tritonclient.grpc.InferenceServerClient, "get_model_config") - mock_get_model_config.return_value = response_dict - - -def patch_http_client__model_up_and_ready(mocker, model_config: TritonModelConfig): - mock_get_repo_index = mocker.patch.object(tritonclient.http.InferenceServerClient, "get_model_repository_index") - mock_get_repo_index.return_value = [ - {"name": model_config.model_name, "version": "1", "state": "READY", "reason": ""} - ] - - mocker.patch.object(tritonclient.http.InferenceServerClient, "is_model_ready").return_value = True - - model_config_dict = ModelConfigGenerator(model_config).get_config() - mock_get_model_config = mocker.patch.object(tritonclient.http.InferenceServerClient, "get_model_config") - mock_get_model_config.return_value = model_config_dict diff --git a/stf/stf-api-alternative/pytriton/tests/utils.py b/stf/stf-api-alternative/pytriton/tests/utils.py deleted file mode 100644 index ed823462fdbfb58430956d753e5f1bd8c5d686b6..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tests/utils.py +++ /dev/null @@ -1,536 +0,0 @@ -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import contextlib -import fcntl -import logging -import os -import pathlib -import re -import select -import socket -import subprocess -import threading -import typing - -LOGGER = logging.getLogger(__name__) -DEFAULT_LOG_FORMAT = "%(asctime)s - %(levelname)8s - %(process)8d - %(threadName)s - %(name)s: %(message)s" - - -def _read_outputs(_process, _logger, _outputs): - # Set stdout and stderr file descriptors to non-blocking mode - try: - fcntl.fcntl(_process.stdout, fcntl.F_SETFL, os.O_NONBLOCK) - fcntl.fcntl(_process.stderr, fcntl.F_SETFL, os.O_NONBLOCK) - except ValueError: # when selecting on closed files - return - - buffers = {_process.stdout: "", _process.stderr: ""} - rds = [_process.stdout, _process.stderr] - while rds: - try: - readable, _, _ = select.select(rds, [], [], 1) - except ValueError: # when selecting on closed files - break - - for rd in readable: - try: - data = os.read(rd.fileno(), 4096) - if not data: - rds.remove(rd) - continue - - decoded_data = data.decode("utf-8") - buffers[rd] += decoded_data - lines = buffers[rd].splitlines(keepends=True) - - if buffers[rd].endswith("\n"): - complete_lines = lines - buffers[rd] = "" - else: - complete_lines = lines[:-1] - buffers[rd] = lines[-1] - - for line in complete_lines: - line = line.rstrip() - _logger.info(line) - _outputs.append(line) - except OSError: # Reading from an empty non-blocking file - pass - - -class ScriptThread(threading.Thread): - def __init__(self, cmd, workdir=None, group=None, target=None, name=None, args=(), kwargs=None) -> None: - super().__init__(group, target, name, args, kwargs, daemon=True) - self.cmd = cmd - self.workdir = workdir - self._process_spawned_or_spawn_error_flag = None - self.active = False - self._process = None - self.returncode = None - self._output = [] - self._logger = logging.getLogger(self.name) - - def __enter__(self): - self.start(threading.Event()) - self._process_spawned_or_spawn_error_flag.wait() - return self - - def __exit__(self, *args): - self.stop() - self.join() - self._process_spawned_or_spawn_error_flag = None - - def start(self, flag: typing.Optional[threading.Event] = None) -> None: - if flag is None: - flag = threading.Event() - self._logger.info(f"Starting {self.name} script with \"{' '.join(self.cmd)}\" cmd") - self._process_spawned_or_spawn_error_flag = flag - super().start() - - def stop(self): - self._logger.info(f"Stopping {self.name} script") - self.active = False - - def run(self): - import psutil - - self.returncode = None - self._output = [] - self._process = None - - os.environ.setdefault("PYTHONUNBUFFERED", "1") # to not buffer logs - try: - with psutil.Popen( - self.cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=0, cwd=self.workdir - ) as process: - self._process = process - self.active = True - if self._process_spawned_or_spawn_error_flag: - self._process_spawned_or_spawn_error_flag.set() - while self.active and process.poll() is None and process.returncode is None: - try: - _read_outputs(process, self._logger, self._output) - except KeyboardInterrupt: - self.stop() - - finally: - if 
self._process_spawned_or_spawn_error_flag: - self._process_spawned_or_spawn_error_flag.set() - if self.process: - while self.process.poll() is None: - _read_outputs(self.process, self._logger, self._output) - _read_outputs(self.process, self._logger, self._output) - self.returncode = process.wait() # pytype: disable=name-error - self._logger.info(f"{self.name} process finished with {self.returncode}") - - self.active = False - self._process = None - - @property - def output(self): - return "\n".join(self._output) - - @property - def process(self): - return self._process - - -def find_free_port() -> int: - with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: - s.bind(("", 0)) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - return s.getsockname()[1] - - -class ProcessMonitoring: - """A class that dumps the state of a process and its children. - - This class uses the py-spy tool to dump the stack trace of a process and its - children recursively. It also dumps the process information such as the parent - and the command line. It allows registering custom monitors that can perform - additional actions on the process. - - Attributes: - _logger (logging.Logger): The logger object to write messages. - _process (psutil.Process): The process object to monitor. - _children_processes (list[psutil.Process]): The list of child processes to monitor. - _log (logging.Logger.method): The logging method to use for messages. - _remove_color (bool): Whether to remove ANSI escape sequences from the output. - _ansi_escape (re.Pattern): The regular expression object to match ANSI escape sequences. - _custom_monitors (list[typing.Callable[[int], None]]): The list of custom monitor functions to execute on each dump cycle. - """ - - def __init__( - self, - pid: int, - logger: typing.Optional[logging.Logger] = None, - loglevel: int = logging.INFO, - remove_color: bool = False, - ): - """Initializes the ProcessMonitoring object. - - Args: - pid (int): The process ID of the process to monitor. - logger (typing.Optional[logging.Logger], optional): The logger object to write messages. Defaults to None. - loglevel (int, optional): The logging level to use for messages. Defaults to logging.INFO. - remove_color (bool, optional): Whether to remove ANSI escape sequences from the output. Defaults to False. - """ - import re - - import psutil - - self._logger = logger or logging.getLogger("monitoring") - self._process = psutil.Process(pid) - self._children_processes = list(self._process.children(recursive=True)) - self._log = { - logging.DEBUG: self._logger.debug, - logging.INFO: self._logger.info, - logging.WARNING: self._logger.warning, - logging.ERROR: self._logger.error, - }[loglevel] - self._log(f"Initial list of children processes: {self._children_processes}") - self._remove_color = remove_color - pattern = r"\x1b\[.*?m" - self._ansi_escape = re.compile(pattern) - self._custom_monitors = [] - - def register_custom_monitor(self, custom_monitor: typing.Callable[[int], None]) -> None: - """Registers a custom monitor for the process. - - This method adds a custom monitor function to the list of monitors that are - executed on each dump cycle. A custom monitor function should take an integer - as an argument (the process ID) and return None. - - Args: - custom_monitor (typing.Callable[[int], None]): The custom monitor function to register. - """ - self._custom_monitors.append(custom_monitor) - - def dump_state(self) -> None: - """Dumps the state of the process and its children. 
- - This method calls the _dump_processes_stacktrace and _dump_child_processes - methods to dump the stack trace and the process information of the process - and its children recursively. - """ - self._dump_processes_stacktrace() - self._dump_child_processes() - - def _dump_processes_stacktrace(self): - import psutil - import sh - - self._log("==== Dump process stacktrace") - pyspy_cmd = sh.Command("py-spy") - - for process in [self._process] + self.children: - try: - result = pyspy_cmd("dump", "-ll", "--nonblocking", "-p", str(process.pid)) - if self._remove_color: - result = self._ansi_escape.sub("", str(result)) - self._log(f"Dump stack trace for process (pid={process.pid}) with cmd {process.cmdline()}") - for custom_monitor in self._custom_monitors: - custom_monitor(process.pid) - self._log(result) - except psutil.NoSuchProcess as e: - self._log(f"Error during handling process: {e}") - except sh.ErrorReturnCode_1 as e: - self._log(f"Error during calling py-spy process: {e}") - - def _dump_child_processes(self): - import psutil - - self._log("==== Dump process info (with its children)") - for process in [self._process] + self.children: - try: - self._log(f"{process} parent={process.parent()} ") - except psutil.NoSuchProcess: - self._log(f"{process} is missing in process table") - - @property - def children(self): - """Returns the list of child processes to monitor. - - This property returns the list of child processes to monitor, and updates it - with any new children that are created by the process. - - Returns: - list[psutil.Process]: The list of child processes to monitor. - """ - import psutil - - try: - children = list(self._process.children(recursive=True)) - self._children_processes = list(set(self._children_processes + children)) - except psutil.NoSuchProcess: - pass - return self._children_processes - - -def get_current_container_version(): - container_version = os.environ.get("NVIDIA_PYTORCH_VERSION") or os.environ.get("NVIDIA_TENSORFLOW_VERSION") - if container_version and "-" in container_version: - container_version = container_version.split("-")[0] # TF version has format - - return container_version - - -def verify_docker_image_in_readme_same_as_tested(readme_path, image_name_with_version): - image_name, image_version = image_name_with_version.split(":") - framework_name = image_name.split("/")[-1] - readme_payload = pathlib.Path(readme_path).read_text() - match_iterator = re.finditer( - rf"(?P[\w/.\-:]+)/{framework_name}:(?P[\w.-]+)", - readme_payload, - ) - for entry in match_iterator: - assert entry.group() == image_name_with_version, f"{entry.group()} != {image_name_with_version}" - - -def search_warning_on_too_verbose_log_level(logs: str): - pattern = r"Triton Inference Server is running with enabled verbose logs.*It may affect inference performance." - return re.search(pattern, logs) - - -class ProcessMonitoringThread: - """A class that creates a thread to monitor a process. - - This class uses the ProcessMonitoring class to dump the state of a process - and its children periodically. It also allows registering custom monitors - that can perform additional actions on the process. - - Attributes: - _monitoring (ProcessMonitoring): The ProcessMonitoring object that handles the dumping logic. - _stop_event (threading.Event): The event object that signals the thread to stop its loop. - _thread (threading.Thread): The thread object that runs the _run method in a loop. - _interval (float): The interval in seconds between each dump cycle. 
- """ - - def __init__(self, monitoring: ProcessMonitoring, interval: float = 60): - """Initializes the ProcessMonitoringThread object. - - Args: - monitoring (ProcessMonitoring): The ProcessMonitoring object that handles the dumping logic. - interval (float, optional): The interval in seconds between each dump cycle. Defaults to 60. - """ - self._monitoring = monitoring - self._interval = interval - - def start(self) -> None: - """Starts the monitoring thread. - - This method creates a new thread that runs the _run method in a loop until - the stop method is called or an exception occurs. It also sets the stop event - object that can be used to signal the thread to stop gracefully. - """ - self._stop_event = threading.Event() - self._thread = threading.Thread(target=self._run, daemon=True) - self._thread.start() - - def stop(self) -> None: - """Stops the monitoring thread. - - This method sets the stop event object that signals the thread to stop its loop. - It also waits for the thread to join before returning. - """ - self._stop_event.set() - self._thread.join() - - def __enter__(self): - """Enters the context manager for the monitoring thread.""" - self.start() - return self - - def __exit__(self, *args): - """Exits the context manager for the monitoring thread.""" - self.stop() - - def _run(self): - logging.info("Monitoring process") - self._monitoring.dump_state() - while not self._stop_event.wait(self._interval): - logging.info("Monitoring process") - self._monitoring.dump_state() - - -class TestMonitoringContext: - """A context manager that monitors test processes. - - This context manager creates threads to monitor the test processes and dumps - their state periodically. It can extend argparse args with additional arguments. - It supports splitting log into different files. The standard output log can have one level - and the file log can have another level. It uses log rotation. - """ - - @staticmethod - def extend_args(parser): - parser.add_argument( - "--verbose", - action="store_true", - help="Provide verbose logs", - ) - parser.add_argument( - "--log-path", - type=str, - default=None, - help="Provide the path of external log for rotation", - ) - parser.add_argument( - "--compress-logs", - action="store_true", - help="Enable logs compression", - ) - parser.add_argument( - "--maximum-log-file", - type=int, - default=10 * 1024 * 1024, - help="Maximum logfile size before rotation is started", - required=False, - ) - parser.add_argument( - "--enable-fault-handler", - action="store_true", - help="Enable faulthandler", - ) - parser.add_argument( - "--faulthandler-interval", - type=float, - default=None, - help="Enable faulthandler after specified number of seconds with repeat", - required=False, - ) - parser.add_argument( - "--process-monitoring-interval", - type=float, - default=None, - help="Enable process monitoring after specified number of seconds with repeat", - required=False, - ) - - def __init__(self, args): - """Initializes the TestMonitoringContext object. - - Args: - args (argparse.Namespace): The argparse args object to extend with additional arguments. 
- """ - self._args = args - - def __enter__(self): - import faulthandler - import logging.handlers - - args = self._args - self._loglevel = log_level = logging.DEBUG if args.verbose else logging.INFO - logging.basicConfig(level=logging.DEBUG, format=DEFAULT_LOG_FORMAT) - logger = logging.getLogger() - - if args.log_path is not None: - # Create a rotating file handler for the file output logger - # The file name is based on the log path argument, the maximum size is 10 MB, and the maximum number of files is 500 - file_handler = logging.handlers.RotatingFileHandler( - args.log_path, maxBytes=args.maximum_log_file, backupCount=500 - ) - file_handler.setFormatter(logging.Formatter(DEFAULT_LOG_FORMAT)) - file_handler.setLevel(logging.DEBUG) - if args.compress_logs: - file_handler.namer = lambda name: name + ".gz" - - def gzip_rotation(source, dest): - import gzip - import os - - with open(source, "rb") as f_in: - with gzip.open(dest, "wb") as f_out: - f_out.writelines(f_in) - os.remove(source) - - file_handler.rotator = gzip_rotation - - # Add the file handler to the default logger - logger.addHandler(file_handler) - # Get the stream handler that was created by basicConfig - - # Get the stream handler that was created by basicConfig - stream_handler = logger.handlers[0] - # Set the stream handler's level to match the log level argument - stream_handler.setLevel(log_level) - - if args.enable_fault_handler: - faulthandler.enable() - - if args.faulthandler_interval is not None: - faulthandler.dump_traceback_later(args.faulthandler_interval, repeat=True, exit=False) - - custom_monitors = [] - - import os - - import psutil - - def monitor_ram_usage(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR RAM USAGE ({pid}): {process.memory_info()}") - - custom_monitors.append(monitor_ram_usage) - - def monitor_file_descriptors(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR FILE DESCRIPTORS ({pid}): {process.num_fds()}") - - custom_monitors.append(monitor_file_descriptors) - - def monitor_cpu_usage(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR CPU USAGE ({pid}): {process.cpu_percent()}") - - custom_monitors.append(monitor_cpu_usage) - - def monitor_threads(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR THREADS ({pid}): {process.num_threads()}") - - custom_monitors.append(monitor_threads) - - def monitor_process_dict(pid=None): - if pid is None: - pid = os.getpid() - - process = psutil.Process(pid) - logger.debug(f"MONITOR PROCESS DICT ({pid}): {process.as_dict()}") - - custom_monitors.append(monitor_process_dict) - if args.process_monitoring_interval is not None: - monitoring = ProcessMonitoring(os.getpid(), logger, loglevel=logging.DEBUG, remove_color=True) - for monitor in custom_monitors: - monitoring.register_custom_monitor(monitor) - - self._monitor = ProcessMonitoringThread(monitoring, interval=args.process_monitoring_interval) - self._monitor.start() - return self - - def __exit__(self, *args): - if hasattr(self, "_monitor"): - self._monitor.stop() - self._monitor = None diff --git a/stf/stf-api-alternative/pytriton/tox.ini b/stf/stf-api-alternative/pytriton/tox.ini deleted file mode 100644 index b2f7d3473db7b0488a365cd5253a5bc2100d2459..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/pytriton/tox.ini +++ /dev/null @@ -1,40 +0,0 @@ -# 
Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -[tox] -envlist = py38, py39, py310, py311, codeblocks, pytype, pre-commit -isolated_build = True - -[testenv] -setenv = - PIP_EXTRA_INDEX_URL=https://pypi.ngc.nvidia.com -commands = - {envbindir}/pytest --import-mode append --durations=0 -vv --basetemp={envtmpdir} tests/unit -extras = - test - -[testenv:codeblocks] -commands = - {envbindir}/pytest --import-mode append --durations=0 -vv --basetemp={envtmpdir} --timeout=120 --codeblocks docs README.md -deps = - torch - tensorflow - transformers - -[testenv:pytype] -basepython = python -commands = pytype --keep-going -j auto pytriton tests examples -x pytriton/tritonserver/backends/python/platform_handlers - -[testenv:pre-commit] -basepython = python -commands = pre-commit run -a diff --git a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/compose-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/compose-checkpoint.py deleted file mode 100644 index 9ab5ae8dd15e38fd013d3b20e885ba32ec3d1da7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/compose-checkpoint.py +++ /dev/null @@ -1,675 +0,0 @@ -from pathlib import Path - -import cv2 -import numpy as np -import pandas as pd - - -def maskblur(mask, kernel_size, sigma=1): - mask_blur = cv2.GaussianBlur(mask, (kernel_size, kernel_size), sigma) - return mask_blur - - -def erosion(mask, kernel_size): - kernel = np.ones((kernel_size, kernel_size), np.uint8) - erosion_image = cv2.erode(mask, kernel, iterations=1) # // make erosion image - return erosion_image - - -def dilate(mask, kernel_size): - kernel = np.ones((kernel_size, kernel_size), np.uint8) - erosion_image = cv2.dilate(mask, kernel, iterations=1) # // make erosion image - return erosion_image - - -def resize_adapt(model_out, crop_region): - def inter_alg(target_size, img): - if isinstance(target_size, tuple): - w, h = target_size - else: - w, h = target_size, target_size - return inter_alg_(w, h, img) - - def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - x1, y1, x2, y2 = crop_region - h, w = y2 - y1 + 1, x2 - x1 + 1 - sz = model_out.shape[0] # h,w 동일하다. 
- if h == sz and w == sz: - return model_out - - r = max(h, w) / sz - max_hw = max(h, w) - temp_ = cv2.resize(model_out, (max_hw, max_hw), inter_alg(max_hw, model_out)) - temp_ = temp_[ - (max_hw - h) // 2 : (max_hw - h) // 2 + h, - (max_hw - w) // 2 : (max_hw - w) // 2 + w, - ] - return temp_ - - -def get_face_mask( - img_size, df_fan_row, blur_ratio=0.3, dilate_ratio=0.2, erosion_ratio=0 -): - assert blur_ratio >= 0 and blur_ratio <= 1 - assert erosion_ratio >= 0 and erosion_ratio <= 1 - assert dilate_ratio >= 0 and dilate_ratio <= 1 - - def _masking(img, pts, value): - img = cv2.fillPoly(img, [pts], value) - return img - - def _get_face_pts_n_box(img_size, df_fan_row): - box = df_fan_row["cropped_box"] - pts2d = df_fan_row["pts2d"] - np.array([box[0], box[1]]) - - if isinstance(df_fan_row["cropped_size"], float): - cropped_size = df_fan_row["cropped_size"] - else: - cropped_size = df_fan_row["cropped_size"][0] - ratio = img_size[0] / cropped_size - pts2d = pts2d * ratio - xs, ys = pts2d[:, 0], pts2d[:, 1] - l, t, r, b = min(xs), min(ys), max(xs), max(ys) - return np.concatenate([pts2d[0:17, :], pts2d[17:27, :][::-1]]).astype( - np.int32 - ), (l, t, r, b) - - if df_fan_row["pts2d"] is None: - mask = np.zeros((img_size[1], img_size[0]), dtype=np.uint8) - if len(mask.shape) == 2: - mask = np.expand_dims(mask, axis=2) - return {"crop": mask, "origin": 1 - mask} - - pts, box = _get_face_pts_n_box(img_size, df_fan_row) - h = max(box[2] - box[0], box[3] - box[1]) - mask = np.zeros((img_size[1], img_size[0]), dtype=np.uint8) - mask = _masking(mask, pts, (255)) - if dilate_ratio != 0: - mask = dilate(mask, int(h * dilate_ratio) // 2 * 2 + 1) - if erosion_ratio != 0: - mask = erosion(mask, int(h * erosion_ratio) // 2 * 2 + 1) - if blur_ratio != 0: - blur_kernel_size = int(h * blur_ratio) // 2 * 2 + 1 - mask = maskblur(mask, blur_kernel_size, 0) - mask = mask / 255 - if len(mask.shape) == 2: - mask = np.expand_dims(mask, axis=2) - return {"crop": mask, "origin": 1 - mask} - - -def cromakey_green(img): - r = img[:, :, 0] - g = img[:, :, 1] - b = img[:, :, 2] - g_alpha = g > 50 - r_alpha = (g * 1.0) > r - b_alpha = (g * 0.7) > b - alpha = g_alpha & (r_alpha & b_alpha) - alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=13) - alpha[np.where(alpha > 100)] = 255 - alpha = erosion(alpha, kernel_size=5) - - if len(alpha.shape) == 2: - alpha2 = np.expand_dims(alpha, axis=2) - else: - alpha2 = alpha - new = np.concatenate((img, alpha2), axis=2) - return new - - -def cromakey_green_binary(img): - img = cromakey_green(img) - alpha = img[:, :, 3] - alpha[np.where(alpha <= 128)] = 0 - alpha[np.where(alpha > 128)] = 1 - - -def cromakey_green_hunet_lmy(img): - r = img[:, :, 0] - g = img[:, :, 1] - b = img[:, :, 2] - g_alpha = g > 70 - r_alpha = g > r - b_alpha = (g * 0.8) > b - alpha = g_alpha & (r_alpha & b_alpha) - alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=11) - alpha[np.where(alpha > 100)] = 255 - alpha = maskblur(alpha, kernel_size=3) - alpha = erosion(alpha, kernel_size=3) - - if len(alpha.shape) == 2: - alpha2 = np.expand_dims(alpha, axis=2) - else: - alpha2 = alpha - new = np.concatenate((img, alpha2), axis=2) - return new - - -# Chroma key function for ybm videos -def cromakey_green_ybm_front(img): - r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2] - - g_alpha = g > 70 - # r_alpha = (g * 0.7) > r - # b_alpha = (g * 0.7) > b - r_alpha = g > r - b_alpha = (g * 0.9) > b - - alpha = g_alpha & (r_alpha & b_alpha) -
alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=11) - alpha[np.where(alpha > 100)] = 255 - alpha = maskblur(alpha, kernel_size=3) - alpha = maskblur(alpha, kernel_size=3) - - grey_alpha = alpha < 255 - g[grey_alpha] = r[grey_alpha] * 0.8 - - if len(alpha.shape) == 2: - alpha2 = np.expand_dims(alpha, axis=2) - else: - alpha2 = alpha - new = np.concatenate((img, alpha2), axis=2) - return new - - -# ybm 영상용 크로마키 함수 -def cromakey_green_ybm_side(img): - img = img.copy() - r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2] - g_alpha = g > 50 - r_alpha = g > r - b_alpha = (g * 0.9) > b - - alpha = g_alpha & (r_alpha & b_alpha) - alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=11) - alpha[np.where(alpha > 100)] = 255 - alpha = maskblur(alpha, kernel_size=3) - alpha = maskblur(alpha, kernel_size=3) - - grey_alpha = alpha < 255 - g[grey_alpha] = r[grey_alpha] * 0.8 - - if len(alpha.shape) == 2: - alpha2 = np.expand_dims(alpha, axis=2) - else: - alpha2 = alpha - new = np.concatenate((img, alpha2), axis=2) - return new - - -# devin 영상용 크로마키 함수 -def cromakey_green_devin_side(img): - img = img.copy() - r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2] - g_alpha = g > 70 - r_alpha = (g * 0.8) > r - # r_alpha = g > r - b_alpha = (g * 0.9) > b - - alpha = g_alpha & (r_alpha & b_alpha) - alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=7, sigma=3) - alpha[np.where(alpha < 150)] = 0 - alpha = maskblur(alpha, kernel_size=5, sigma=2) - - if len(alpha.shape) == 2: - alpha = np.expand_dims(alpha, axis=2) - - new = np.concatenate((img, alpha), axis=2) - return new - - -def get_cromakey_func(args): - if "cromakey" not in args.keys(): - return cromakey_green_hunet_lmy - if "cromakey_green_ybm_front" == args.cromakey: - return cromakey_green_ybm_front - if "cromakey_green_ybm_side" == args.cromakey: - return cromakey_green_ybm_side - if "cromakey_green_devin_side" == args.cromakey: - return cromakey_green_devin_side - - raise "cromakey not found" - - -def compose_default_(model_out, org_image_with_alpha, mask, **kwargs): - # 1. 마스크 섞기 : 원래 비디오의 투명값과 계산한 마스크를 섞는다. - mask = mask[:, :, 0] - mask[np.where(mask > 0)] = 1 # 마스크 영역을 128 -> 1 로 만든다. - model_out[:, :, 3] = ( - org_image_with_alpha[:, :, 3] * (1 - mask) + model_out[:, :, 3] * mask - ) - - # 2. 섞인 마스크가 좀 자연스럽게 섞이도록 함. - model_out[:, :, 3] = maskblur(model_out[:, :, 3], kernel_size=3, sigma=1) - return model_out - - -def compose_devin_(model_out, org_image_with_alpha, mask, debug=False, **kwargs): - mask = mask[:, :, 0] - mask[np.where(mask > 0)] = 1 # 마스크 영역을 128 -> 1 로 만든다. - mask = mask.astype(np.float32) - - # 1. 기존마스크와 경계가 잘 안보이도록 마스크를 부드럽게 만든다. - kernel_size = int(mask.shape[0] * 0.03) // 2 * 2 + 1 # 이미지 크기의 3% 정도 마스크를 확장한다. - if debug: - print( - f"## compose_devin_: kernel_size:{kernel_size}, mask_height:{mask.shape[0]}" - ) - if kernel_size >= 3: - mask = dilate(mask, kernel_size=kernel_size) - mask = maskblur(mask, kernel_size=kernel_size, sigma=kernel_size // 2) - mask = maskblur(mask, kernel_size=kernel_size, sigma=kernel_size // 2) - mask = erosion(mask, kernel_size=3) # 1pixel 만 줄임 - - # 2. 마스크 섞기 : 원래 비디오의 투명값과 계산한 마스크를 섞는다. - model_out[:, :, 3] = ( - org_image_with_alpha[:, :, 3] * (1 - mask) + model_out[:, :, 3] * mask - ) - - # 3. 섞인 마스크가 부드럽게 한번더 블러를 한다. 
- model_out[:, :, 3] = maskblur(model_out[:, :, 3], kernel_size=3, sigma=1) - - return model_out - - -def get_compose_mask_func(args): - if "cromakey" in args.keys(): - if "cromakey_green_devin_side" == args.cromakey: - return compose_devin_ - if "compose" in args.keys(): - if "compose_smooth" == args.compose: - return compose_devin_ - return compose_default_ - - -def get_keying_func(template): - cromakey_func = get_cromakey_func(template.model.args) - compose_func = get_compose_mask_func(template.model.args) - - def keying_(pred, idx, box=None): - model_out, mask, alpha = pred["pred"], pred["mask"], pred["img_gt_with_alpha"] - - if pred["filename"].endswith("_no.jpg") or pred["filename"].endswith("_no.png"): - return alpha[:, :, [2, 1, 0, 3]] - - if ( - alpha.shape[0] != mask.shape[0] - or alpha.shape[1] != mask.shape[1] - or alpha.shape[0] != model_out.shape[0] - or alpha.shape[1] != model_out.shape[1] - ): - raise Exception( - f"not matched keying shape. " - f"alpha: {alpha.shape[0]}, {alpha.shape[1]}, {alpha.shape[2]}, " - f"mask: {mask.shape[0]}, {mask.shape[1]}, " - f"model_out: {model_out.shape[0]}, {model_out.shape[1]}" - ) - - if box is not None: - model_h = model_out.shape[0] - box_h = box[3] - box[1] - if box_h > model_h: - model_out = resize_adapt(model_out, box) - mask = resize_adapt(mask, box) - alpha = resize_adapt(alpha, box) - model_out = cromakey_func(model_out) - model_out = compose_func( - model_out=model_out, org_image_with_alpha=alpha, mask=mask - ) - - return model_out - - return keying_ - - -def get_box_mask(width, height, config, verbose=False): - def get_mask_( - width, height, gradation_width, gradation_bottom=None, box_mask_erosion=None - ): - mask = np.ones((height, width, 1)) - r = list(range(0, gradation_width, 1)) - for s, e in zip(r, r[1:]): - g = s / gradation_width - # print(f'---- s:{s}, e:{e}, g:{g}') - mask[s:e, s : width - s, :] = g - mask[height - e : height - s, s : width - s, :] = g - mask[s : height - s, s:e, :] = g - mask[s : height - s, width - e : width - s, :] = g - if gradation_bottom is not None: - r = list(range(0, gradation_bottom, 1)) - for s, e in zip(r, r[1:]): - g = s / gradation_bottom - mask[height - e : height - s, s : width - s, :] = g - if box_mask_erosion is not None: - mask = erosion(mask, box_mask_erosion * 2 + 1) - if len(mask.shape) == 2: - mask = np.expand_dims(mask, 2) # mask shape ex: (352,352,1) - - return mask - - gradation_width = int(height * 0.1) - gradation_bottom = ( - int(height * config["gradation_bottom"]) - if "gradation_bottom" in config.keys() - else None - ) - box_mask_erosion = ( - int(height * config["box_mask_erosion"]) - if "box_mask_erosion" in config.keys() - else None - ) - # if verbose: - # print('gradation_width : ', gradation_width) - # print('gradation_bottom : ', gradation_bottom) - # print('box_mask_erosion : ', box_mask_erosion) - mask = get_mask_(width, height, gradation_width, gradation_bottom, box_mask_erosion) - mask_crop = mask - mask_origin = 1 - mask - return {"crop": mask_crop, "origin": mask_origin} - - -def get_compose_func_without_keying_move(template, ratio, verbose=False): - args = template.model.args - df = pd.read_pickle( - f"{template.crop_mp4_dir}/{Path(template.template_video_path).stem}_000/df_fan.pickle" - ) - df = df.set_index("frame_idx") - move_head_box_size = ( - (df.loc[0]["cropped_box"][2] - df.loc[0]["cropped_box"][0] - 20) // 10 * 10 - ) - - def resize_and_scale(model_out, head_box_idx): - # ratio 1.0 에 맞는 크기로 resize 하고, - # 원래 영상에서 10의 배수에 해당하는 위치로 (head_box, 
model_out) 모두 잘라낸다. - head_box = df["cropped_box"][head_box_idx] - if ratio == 1.0: - return model_out, head_box - - # 일단 원래 크기로 만든다. - model_out = resize_adapt(model_out, head_box) - - # 원래 크기에서의 박스에서 10의 배수에 해당하는 좌표를 찾는다. - l, t = (np.array(head_box[:2]) + 9) // 10 * 10 - new_head_box = np.array( - [l, t, l + move_head_box_size - 1, t + move_head_box_size - 1] - ) # 양쪽포함이라서 1을 빼준다. - - # 10의 배수에 맞춰서 이미지를 잘라낸다. - diff_box = new_head_box - head_box - new_model_out = model_out[diff_box[1] : diff_box[3], diff_box[0] : diff_box[2]] - # if verbose and head_box_idx == 0: - # print('org head_box:', head_box, ', new_head_box:', new_head_box) - # print('alpah2.shape:', model_out.shape, ', new_model_out:', new_model_out.shape) - if ( - new_model_out.shape[0] % 10 != 0 or new_model_out.shape[1] % 10 != 0 - ): # 크기는 10의 배수여야 한다. - raise Exception(f"new_model_out.shape % 10 != 0, {new_model_out.shape}") - - # ratio에 맞는 크기로 변경한다. - x1, y1, _, _ = np.round(new_head_box * ratio).astype(np.uint8) - # 양쪽포함이라서 -1을 해준다. - new_head_box = ( - x1, - y1, - x1 + int(move_head_box_size * ratio) - 1, - y1 + int(move_head_box_size * ratio) - 1, - ) - new_model_out = resize_adapt(new_model_out, new_head_box) - - # if verbose and head_box_idx == 0: - # print('org head_box:', head_box, ', new_head_box:', new_head_box) - # print('alpah2.shape:', model_out.shape, ', new_model_out:', new_model_out.shape) - - return new_model_out, new_head_box - - def compose_one(model_out, full_img, head_box_idx): - model_out, box = resize_and_scale(model_out, head_box_idx) - x1, y1, x2, y2 = box - img = resize_adapt(model_out, (x1, y1, x2, y2)) - if ( - "compose" in template.config.keys() - and template.config.compose == "face_only" - ): - row = df.loc[head_box_idx] - mask_box = get_face_mask( - (img.shape[1], img.shape[0]), row, **get_compose_option(template.config) - ) - else: - mask_box = get_box_mask( - x2 - x1 + 1, y2 - y1 + 1, config=args, verbose=verbose - ) - - if y2 - y1 + 1 != img.shape[0] or x2 - x1 + 1 != img.shape[1]: - raise Exception( - f"not matched compose shape. x2-x1+1: {x2 - x1 + 1}, y2-y1+1:{y2 - y1 + 1}, img: {img.shape[1]}, {img.shape[0]}" - ) - - # Compose the image - if full_img.shape[2] == 3: - alpha = np.zeros_like(full_img[:, :, :1]) - alpha.fill(255) - full_img = np.concatenate([full_img, alpha], axis=2) - - out_memory = full_img.copy() - - alpha = img[:, :, 3] - alpha = cv2.merge([alpha, alpha, alpha]) - - back = out_memory[y1 : y2 + 1, x1 : x2 + 1].copy() - front = img[:, :, 0:3] - - img = np.concatenate( - [np.where(alpha < (255, 255, 255), back[:, :, :3], front), back[:, :, 3:]], - axis=2, - ) - out_memory[y1 : y2 + 1, x1 : x2 + 1] = ( - full_img[y1 : y2 + 1, x1 : x2 + 1] * mask_box["origin"] - + img * mask_box["crop"] - ) - return out_memory - - return compose_one - - -def get_compose_func_without_keying_default(template, ratio, verbose=False): - args = template.model.args - df = pd.read_pickle( - f"{template.crop_mp4_dir}/{Path(template.template_video_path).stem}_000/df_fan.pickle" - ) - # sz = df['cropped_size'].values[0] - # 원래 4k 템플릿에서 축소된 비율만큼 cropped_box 크기를 줄여준다. 
- x1, y1, x2, y2 = np.round(np.array(df["cropped_box"].values[0]) * ratio).astype( - np.uint8 - ) - del df - mask_box = get_box_mask(x2 - x1 + 1, y2 - y1 + 1, config=args, verbose=verbose) - img_size = args.img_size - if verbose: - print("croped size: ", x2 - x1 + 1, y2 - y1 + 1) - print("croped region(x1,y1,x2,y2): ", x1, y1, x2, y2) - - def compose_one(model_out, full_img, _): - img = resize_adapt(model_out, (x1, y1, x2, y2)) - if y2 - y1 + 1 != img.shape[0] or x2 - x1 + 1 != img.shape[1]: - raise Exception( - f"not matched compose shape. x2-x1+1: {x2 - x1 + 1}, y2-y1+1:{y2 - y1 + 1}, img: {img.shape[1]}, {img.shape[0]}" - ) - - # 붙여넣기 - if full_img.shape[2] == 3: - alpha = np.zeros_like(full_img[:, :, :1]) - alpha.fill(255) - full_img = np.concatenate([full_img, alpha], axis=2) - - out_memory = full_img.copy() - - alpha = img[:, :, 3] - alpha = cv2.merge([alpha, alpha, alpha]) - - back = out_memory[y1 : y2 + 1, x1 : x2 + 1].copy() - front = img[:, :, 0:3] - - img = np.concatenate( - [np.where(alpha < (255, 255, 255), back[:, :, :3], front), back[:, :, 3:]], - axis=2, - ) - - out_memory[y1 : y2 + 1, x1 : x2 + 1] = ( - full_img[y1 : y2 + 1, x1 : x2 + 1] * mask_box["origin"] - + img * mask_box["crop"] - ) - return out_memory - - return compose_one - - -def get_compose_option(config): - blur_ratio = 0.3 - dilate_ratio = 0.2 - erosion_ratio = 0.0 - if "compose_args" in config.keys(): - if "blur_ratio" in config.compose_args.keys(): - blur_ratio = config.compose_args.blur_ratio - if "dilate_ratio" in config.compose_args.keys(): - dilate_ratio = config.compose_args.dilate_ratio - if "erosion_ratio" in config.compose_args.keys(): - erosion_ratio = config.compose_args.erosion_ratio - return { - "blur_ratio": blur_ratio, - "dilate_ratio": dilate_ratio, - "erosion_ratio": erosion_ratio, - } - - -def get_compose_func_without_keying_face_only(template, ratio, verbose=False): - df = pd.read_pickle( - f"{template.crop_mp4_dir}/{Path(template.template_video_path).stem}_000/df_fan.pickle" - ) - x1, y1, x2, y2 = np.round(np.array(df["cropped_box"].values[0]) * ratio).astype( - np.uint8 - ) - - df = df.set_index("frame_idx") - if verbose: - print("get_compose_option") - print(get_compose_option(template.config)) - - def compose_one(model_out, full_img, head_box_idx): - try: - row = df.loc[head_box_idx] - except Exception as e: - print("exception get_compose_func_without_keying_face_only", e) - raise Exception("exception get_compose_func_without_keying_face_only", e) - img = resize_adapt(model_out, (x1, y1, x2, y2)) - if y2 - y1 + 1 != img.shape[0] or x2 - x1 + 1 != img.shape[1]: - raise Exception( - f"not matched compose shape. x2-x1+1: {x2 - x1 + 1}, y2-y1+1:{y2 - y1 + 1}, img: {img.shape[1]}, {img.shape[0]}" - ) - - mask_box = get_face_mask( - (img.shape[1], img.shape[0]), row, **get_compose_option(template.config) - ) - # 붙여넣기 - out_memory = full_img.copy() - out_memory[y1 : y2 + 1, x1 : x2 + 1] = ( - full_img[y1 : y2 + 1, x1 : x2 + 1] * mask_box["origin"] - + img * mask_box["crop"] - ) - return out_memory - - return compose_one - - -# template video 의 frame 과 model inference 결과를 합성하는 함수를 리턴한다. -# params -# ratio : 템플릿 scale 비율. 
-# 1.0: 템플릿 크기 그대로 -# 0.5: width, height 를 절반으로 줄인 크기 -def get_compose_func_without_keying(template, ratio, verbose=False): - if "move" in template.config.keys() and template.config.move: - return get_compose_func_without_keying_move( - template=template, ratio=ratio, verbose=verbose - ) - if "compose" in template.config.keys() and template.config.compose == "face_only": - return get_compose_func_without_keying_face_only( - template=template, ratio=ratio, verbose=verbose - ) - - return get_compose_func_without_keying_default( - template=template, ratio=ratio, verbose=verbose - ) - - -def compose_direct(box, model_args, ratio, model_out, full_img): - x1, y1, x2, y2 = box - mask_box = get_box_mask(x2 - x1 + 1, y2 - y1 + 1, config=model_args) - img_size = model_args.img_size - - img = resize_adapt(model_out, (x1, y1, x2, y2)) - if y2 - y1 + 1 != img.shape[0] or x2 - x1 + 1 != img.shape[1]: - raise Exception( - f"not matched compose shape. x2-x1+1: {x2 - x1 + 1}, y2-y1+1:{y2 - y1 + 1}, img: {img.shape[1]}, {img.shape[0]}" - ) - - # 붙여넣기 - out_memory = full_img.copy() - out_memory[y1 : y2 + 1, x1 : x2 + 1] = ( - full_img[y1 : y2 + 1, x1 : x2 + 1] * mask_box["origin"] + img * mask_box["crop"] - ) - return out_memory - - -def keying_direct(model_args, pred, box=None): - cromakey_func = get_cromakey_func(model_args) - compose_func = get_compose_mask_func(model_args) - - model_out, mask, alpha = pred["pred"], pred["mask"], pred["img_gt_with_alpha"] - - if pred["filename"].endswith("_no.jpg") or pred["filename"].endswith("_no.png"): - return alpha[:, :, [2, 1, 0, 3]] - - if ( - alpha.shape[0] != mask.shape[0] - or alpha.shape[1] != mask.shape[1] - or alpha.shape[0] != model_out.shape[0] - or alpha.shape[1] != model_out.shape[1] - or alpha.shape[2] != 4 - ): - raise Exception( - f"not matched keying shape. 
" - f"alpha: {alpha.shape[0]}, {alpha.shape[1]}, {alpha.shape[2]}, " - f"mask: {mask.shape[0]}, {mask.shape[1]}, " - f"model_out: {model_out.shape[0]}, {model_out.shape[1]}" - ) - - if box is not None: - model_h = model_out.shape[0] - box_h = box[3] - box[1] - if box_h > model_h: - model_out = resize_adapt(model_out, box) - mask = resize_adapt(mask, box) - alpha = resize_adapt(alpha, box) - model_out = cromakey_func(model_out) - model_out = compose_func(model_out=model_out, org_image_with_alpha=alpha, mask=mask) - - return model_out diff --git a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/inference-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/inference-checkpoint.py deleted file mode 100644 index 2c497302132a140326cce2e381885101537d23ce..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/inference-checkpoint.py +++ /dev/null @@ -1,270 +0,0 @@ -import asyncio -from itertools import chain - -import numpy as np -import torch -from pydub import AudioSegment, silence - - -def check_split_lengths(silent_ranges, len_audio): - prev_end = 0 - for idx, (start, end) in enumerate(silent_ranges): - if idx < len(silent_ranges) - 1: - if silent_ranges[idx + 1][0] - start > 70000: - return False - else: - if len_audio - start > 70000: - return False - return True - - -def load_and_split_audio_by_silence( - audio_segment, - silence_thresh: int = -75, - min_silence_len: int = 500, - min_chunk_length_ms: int = 40, - seek_step: int = 100, - verbose: bool = False, -): - audio_segment = audio_segment.set_channels(1) - audio_segment = audio_segment.set_frame_rate(16000) - - for st in range(silence_thresh, -50, 5): - for msl in range(min_silence_len, 0, -100): - silent_ranges = silence.detect_silence( - audio_segment, msl, st, seek_step=seek_step - ) - length_ok = check_split_lengths(silent_ranges, len(audio_segment)) - if length_ok: - break - - if len(silent_ranges) > 0 and length_ok: - break - - if ( - len(silent_ranges) == 0 - and len(audio_segment) < 70000 - and len(audio_segment) >= 40 - ): - return [audio_segment] - - assert ( - length_ok and len(silent_ranges) > 0 - ), "Each sentence must be within 70 seconds, including silence" - - audio_chunks = [] - prev_end = 0 - - for idx, (start, end) in enumerate(silent_ranges): - if idx < len(silent_ranges) - 1: - chunk_length = silent_ranges[idx + 1][0] - prev_end - silence_length = end - prev_end - chunk_length_samples = ( - chunk_length * 16 - ) # Convert ms to samples (16000 samples/sec) - - if idx == 0: - target_length_samples = (chunk_length_samples // 320 + 1) * 320 + 80 - else: - target_length_samples = (chunk_length_samples // 320 + 1) * 320 - - target_length = target_length_samples // 16 # Convert samples back to ms - - adjusted_end = prev_end + target_length - else: - silence_length = ( - silent_ranges[-1][1] - prev_end - if silent_ranges[-1][1] != len(audio_segment) - else 0 - ) - adjusted_end = len(audio_segment) - - silence_length_split = max(0, (silence_length - 300)) # ms - if silence_length_split <= 0: - silence_chunk = None - chunk = audio_segment[prev_end if idx == 0 else prev_end - 5 : adjusted_end] - else: - silence_length_samples = ( - silence_length_split * 16 - ) # Convert ms to samples (16000 samples/sec) - - if idx == 0: - target_length_samples = (silence_length_samples // 320 + 1) * 320 + 80 - else: - target_length_samples = (silence_length_samples // 320 + 1) * 320 - - silence_length_split = ( - target_length_samples // 16 - ) 
# Convert samples back to ms - - silence_chunk = audio_segment[ - prev_end if idx == 0 else prev_end - 5 : prev_end + silence_length_split - ] - chunk = audio_segment[prev_end + silence_length_split - 5 : adjusted_end] - - if len(chunk) >= min_chunk_length_ms: - if silence_chunk is not None: - audio_chunks.append(silence_chunk) - audio_chunks.append(chunk) - else: - if audio_chunks: - if silence_chunk is not None: - audio_chunks[-1] += silence_chunk - audio_chunks[-1] += chunk - - prev_end = adjusted_end - - return audio_chunks - - -def process_audio_chunks( - audio_processor, audio_encoder, audio_chunks: list[AudioSegment], device -): - features_list = [] - for audio_chunk in audio_chunks: - features = process_audio_chunk( - audio_processor, audio_encoder, audio_chunk, device - ) - features_list.append(features) - return features_list - - -def process_audio_chunk(audio_processor, audio_encoder, audio_chunk, device): - audio_data = np.array(audio_chunk.get_array_of_samples(), dtype=np.float32) - audio_data /= np.iinfo( - np.int8 - if audio_chunk.sample_width == 1 - else np.int16 - if audio_chunk.sample_width == 2 - else np.int32 - ).max - - input_values = audio_processor( - audio_data, sampling_rate=16000, return_tensors="pt" - ).to(device)["input_values"] - - with torch.no_grad(): - logits = audio_encoder(input_values=input_values) - - return logits.last_hidden_state[0] - - -def audio_encode(model, audio_segment, device): - audio_chunks = load_and_split_audio_by_silence(audio_segment) - - features_list = process_audio_chunks( - model.audio_processor, model.audio_encoder, audio_chunks, device - ) - concatenated_features = torch.cat(features_list, dim=0) - - return concatenated_features.detach().cpu().numpy() - - -def dictzip(*iterators): - try: - while True: - yield dict(chain(*[next(iterator).items() for iterator in iterators])) - except StopIteration as e: - pass - - -async def adictzip(*aiterators): - try: - while True: - yield dict( - chain(*[(await anext(aiterator)).items() for aiterator in aiterators]) - ) - except StopAsyncIteration as e: - pass - - -def to_img(t): - t = t.permute(0, 2, 3, 1) - img = ((t / 2.0) + 0.5) * 255.0 - img = torch.clip(img, 0.0, 255.0).type(torch.uint8) - img = img.cpu().numpy() - img = img[:, :, :, [2, 1, 0]] - return img - - -def inference_model(model, v, device, verbose=False): - with torch.no_grad(): - mel, ips, mask, alpha = ( - v["mel"], - v["ips"], - v["mask"], - v["img_gt_with_alpha"], - ) - cpu_ips = ips - cpu_alpha = alpha - - audio = mel.to(device) - ips = ips.to(device).permute(0, 3, 1, 2) - - pred = model.model(ips, audio) - - gen_face = to_img(pred) - - return [ - { - "pred": o, - "mask": mask[j].numpy(), - "ips": cpu_ips[j].numpy(), - "img_gt_with_alpha": cpu_alpha[j].numpy(), - "filename": v["filename"][j], - } - for j, o in enumerate(gen_face) - ] - - -def inference_model_remote(model, v, device, verbose=False): - ips, mel = v["ips"], v["mel"] - try: - pred = model.model( - ips=ips, - mel=mel, - ) - return postprocess_result(pred, v) - except Exception as e: - return [None] * len(v["filename"]) - - -def postprocess_result(pred, v): - pred = pred.cpu().numpy() - pred = pred.transpose(0, 2, 3, 1) - pred = pred[:, :, :, [2, 1, 0]] - return [ - { - "pred": o, - "mask": v["mask"][j].numpy(), - "img_gt_with_alpha": v["img_gt_with_alpha"][j].numpy(), - "filename": v["filename"][j], - } - for j, o in enumerate(pred) - ] - - -async def ainference_model_remote(pool, model, v, device, verbose=False): - ips, mel = v["ips"], v["mel"] - try: - pred = 
await model.model( - ips=ips, - mel=mel, - ) - - loop = asyncio.get_running_loop() - return await loop.run_in_executor(pool, postprocess_result, pred, v) - except Exception as e: - return [None] * len(v["filename"]) - - -def get_head_box(df, move=False, head_box_idx=0, template_ratio=1.0): - # sz = df['cropped_size'].values[0] - # 원래 4k 템플릿에서 축소된 비율만큼 cropped_box 크기를 줄여준다. - if move: - x1, y1, x2, y2 = np.array(df["cropped_box"][head_box_idx]) - else: - x1, y1, x2, y2 = np.round( - np.array(df["cropped_box"].values[0]) * template_ratio - ).astype(np.uint8) - return x1, y1, x2, y2 diff --git a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/model-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/model-checkpoint.py deleted file mode 100644 index 08266cb00ba40533a40ef66acf7b8dd7173a173a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/model-checkpoint.py +++ /dev/null @@ -1,156 +0,0 @@ -import errno -import gc -import os -import sys - -import torch - -# from .s2f_dir.src.speech_encoder.WavLM import WavLM, WavLMConfig -from transformers import Wav2Vec2FeatureExtractor, WavLMModel - -from .s2f_dir.src import autoencoder as ae -from .util import * - -g_fix_seed = False -g_audio_processor = None -g_audio_encoder = None - - -class ModelInfo: - def __init__( - self, - model, - audio_processor, - audio_encoder, - args, - device, - work_root_path, - config_path, - checkpoint_path, - verbose=False, - ): - self.model = model - self.audio_processor = audio_processor - self.audio_encoder = audio_encoder - self.args = args - self.device = device - # snow : 아래는 debuging 을 위해 저장해 두는 것 - self.work_root_path = work_root_path - self.config_path = config_path - self.checkpoint_path = checkpoint_path - self.verbose = verbose - - def __del__(self): - if self.verbose: - print("del model , gc:", sys.getrefcount(self.model)) - del self.model - if self.args.model_type == "stf_v3": - del self.audio_encoder - del self.audio_processor - - -def __init_fix_seed(random_seed, verbose=False): - global g_fix_seed - if g_fix_seed == True: - return - - if verbose: - print("fix seed") - fix_seed(random_seed) - g_fix_seed = True - - -def create_model( - config_path, checkpoint_path, work_root_path, device, verbose=False, wavlm_path=None -): - __init_fix_seed(random_seed=1234, verbose=verbose) - global g_audio_encoder - global g_audio_processor - if verbose: - print("load model") - - if not os.path.exists(config_path): - raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), config_path) - - args = read_config(config_path) - if args.model_type and args.model_type == "remote": - return ModelInfo( - model=None, - audio_processor=None, - audio_encoder=None, - args=args, - device=device, - work_root_path=work_root_path, - config_path=config_path, - checkpoint_path=checkpoint_path, - verbose=verbose, - ) - - if not os.path.exists(checkpoint_path): - raise FileNotFoundError( - errno.ENOENT, os.strerror(errno.ENOENT), checkpoint_path - ) - - if args.model_type: - model = ae.Speech2Face( - 3, - (3, args.img_size, args.img_size), - (1, 96, args.mel_step_size), - args.model_type, - ) - else: - model = ae.Speech2Face( - 3, (3, args.img_size, args.img_size), (1, 96, args.mel_step_size), "stf_v1" - ) - - if len(args.model_type) == 0: # snow: 나중에 생긴 설정이어서 이 항목이 없을 수가 있다. 
- args.model_type = "stf_v1" - - if args.model_type == "stf_v3": - if g_audio_encoder == None: - if wavlm_path is None: - wavlm_path = f"{Path(__file__).parent.parent}/hf_wavlm" - - if verbose: - print(f"@@@@@@@@@@@@@@@@@@ {wavlm_path}") - g_audio_processor = Wav2Vec2FeatureExtractor.from_pretrained(wavlm_path) - g_audio_encoder = WavLMModel.from_pretrained(wavlm_path) - - checkpoint = torch.load(checkpoint_path, map_location="cpu") - if "state_dict" in checkpoint: - model.load_state_dict(checkpoint["state_dict"]) - else: - model.load_state_dict(checkpoint) - if device == "cuda" and torch.cuda.device_count() > 1: - gpus = list(range(torch.cuda.device_count())) - print("Multi GPU activate, gpus : ", gpus) - model = torch.nn.DataParallel(model, device_ids=gpus) - model.to(device) - model.eval() - - if args.model_type == "stf_v3": - g_audio_encoder = torch.nn.DataParallel(g_audio_encoder, device_ids=gpus) - g_audio_encoder.to(device) - g_audio_encoder.eval() - else: - model.to(device).eval() - if args.model_type == "stf_v3": - g_audio_encoder.to(device).eval() - - model_data = ModelInfo( - model=model, - audio_processor=g_audio_processor, - audio_encoder=g_audio_encoder, - args=args, - device=device, - work_root_path=work_root_path, - config_path=config_path, - checkpoint_path=checkpoint_path, - verbose=verbose, - ) - del checkpoint - gc.collect() - if verbose: - print("load model complete") - - return model_data diff --git a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/preprocess-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/preprocess-checkpoint.py deleted file mode 100644 index aa23d5d0a5336755e7f1a6d197f07b84faf8c04c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/preprocess-checkpoint.py +++ /dev/null @@ -1,352 +0,0 @@ -import gc -import os -from pathlib import Path - -import cv2 -from PIL import Image -from tqdm import tqdm - -from .preprocess_dir.utils import crop_with_fan as cwf -from .preprocess_dir.utils import face_finder as ff -from .util import * - - -# template video 전처리 -# preprocess_template_old(기존함수) 와 기능은 동일하고, 메모리 사용량 줄임 -def preprocess_template( - config_path, - template_video_path, - reference_face, - work_root_path, - device, - template_frame_ratio=1.0, - template_video_ratio=[1.0], - callback=None, - verbose=False, - save_frames=True, - silent_video_path=None, - no_infer_frames=[], -): - """template video 전처리 - - Parameters - ---------- - config_path (str) : 설정파일 경로 - template_video_path (str) : 템플릿 영상 경로 - reference_face : (str) : 참고할 얼굴 이미지 경로 - work_root_path (str) : 작업폴더 경로. 전처리 정보가 저장됨. - device (str) : device 정보. ex) cuda:0 - template_frame_ratio (float) : 템플릿 비디오 resize 비율. 1.0: 영상 그대로 사용 - template_video_ratio (list[float]) : 템플릿 비디오 resize 비율. 1.0: 영상 그대로 사용 - save_frames (bool) : 템플릿 비디오 프레임 저장여부 - no_infer_frames (list[tuple[int,int]]) : 추론에 사용되지 않는frame 구간. 시작은 포함, 끝은 포함되지 않음. 
- """ - load_gpu = False - - config = read_config(config_path) - - image_size = config.img_size - - callback1 = callback_inter( - callback, min_per=0, max_per=2, desc="preprocess_template 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=2, max_per=20, desc="preprocess_template 2", verbose=verbose - ) - callback3 = callback_inter( - callback, min_per=20, max_per=100, desc="preprocess_template 3", verbose=verbose - ) - - preprocess_dir = get_preprocess_dir(work_root_path, config.name) - Path(preprocess_dir).mkdir(exist_ok=True, parents=True) - # snow : for debug - if verbose: - print("preprocess_dir: ", preprocess_dir, ", work_root_path:", work_root_path) - - # 전처리 파일 경로 - crop_mp4 = get_crop_mp4_dir(preprocess_dir, template_video_path) - - if not Path(crop_mp4).exists(): - load_gpu = True - - ff.init_face_finder(device) - cwf.init_fan(device) - - if verbose: - print("템플릿 비디오 처리 ... ") - - # 아나운서 얼굴 정보를 구한다. - df_face, imgs = ff.find_face(reference_face) - callback1(100) # 진행율을 알려준다. - - g_anchor_ebd = df_face["ebd"].values[0] - # 템플릿 동영상에서 아나운서 얼굴 위치만 저장해 놓는다 - df_paths = ff.save_face_info3( - template_video_path, - g_anchor_ebd, - config.move, - base=preprocess_dir, - callback=callback2, - verbose=verbose, - ) - - ### 얼굴 영역을 FAN 랜드마크 기반으로 크롭해 놓는다 - assert len(df_paths) == 1 - if config.move: - if verbose: - print("cwf.save_crop_info_move --") - df_fan_path = cwf.save_crop_info_move( - image_size=image_size, - anchor_box_path=df_paths[0], - mp4_path=template_video_path, - out_dir=crop_mp4, - crop_offset_y=config.crop_offset_y, - crop_margin=config.crop_margin, - callback=callback3, - verbose=verbose, - ) - else: - if verbose: - print("cwf.save_crop_info2 --") - df_fan_path = cwf.save_crop_info2( - image_size=image_size, - anchor_box_path=df_paths[0], - mp4_path=template_video_path, - out_dir=crop_mp4, - crop_offset_y=config.crop_offset_y, - crop_margin=config.crop_margin, - no_infer_frames=no_infer_frames, - callback=callback3, - verbose=verbose, - ) - # snow : for debug - if verbose: - print("df_fan_path: ", df_fan_path) - ff.del_face_finder() - cwf.del_fan() - else: - if verbose: - print("전처리가 이미 되어있음") - callback3(100) - - # 1. save frames for stf - if save_frames: - frame_dir = get_frame_dir( - preprocess_dir, template_video_path, ratio=template_frame_ratio - ) - if verbose: - print("frame_dir:", frame_dir) - save_template_frames( - template_video_path=template_video_path, - template_frames_path=frame_dir, - ratio=template_frame_ratio, - save_in_video=False, - verbose=verbose, - ) - if silent_video_path is not None: - frame_dir = get_frame_dir( - preprocess_dir, silent_video_path, ratio=template_frame_ratio - ) - save_template_frames( - template_video_path=silent_video_path, - template_frames_path=frame_dir, - ratio=template_frame_ratio, - save_in_video=False, - verbose=verbose, - ) - - if template_video_path.endswith(".mov"): - # TODO snow : 성능 확인 필요. - # 지금은 mov 인 경우만 파일을 저장한다. 추론할 때 느려서 라고 한다. by hojin - - # 2. 
save video for encoding - for video_ratio in template_video_ratio: - if video_ratio != 1.0: - out_path = get_template_ratio_file_path( - preprocess_dir, template_video_path, ratio=video_ratio - ) - save_template_frames( - template_video_path=template_video_path, - template_frames_path="", - template_video_path_with_ratio=out_path, - ratio=video_ratio, - save_in_video=True, - verbose=verbose, - ) - if silent_video_path is not None: - out_path = get_template_ratio_file_path( - preprocess_dir, silent_video_path, ratio=video_ratio - ) - save_template_webm_ratio( - template_video_path=silent_video_path, - ratio=video_ratio, - out_path=out_path, - verbose=verbose, - ) - - if template_video_path.endswith(".webm"): - # TODO snow : 성능 확인 필요. ratio 개수만큼 webm 을 만든다. - for video_ratio in template_video_ratio: - out_path = get_template_ratio_file_path( - preprocess_dir, template_video_path, ratio=video_ratio - ) - save_template_webm_ratio( - template_video_path=template_video_path, - ratio=video_ratio, - out_path=out_path, - verbose=verbose, - ) - if silent_video_path is not None: - out_path = get_template_ratio_file_path( - preprocess_dir, silent_video_path, ratio=video_ratio - ) - save_template_webm_ratio( - template_video_path=silent_video_path, - ratio=video_ratio, - out_path=out_path, - verbose=verbose, - ) - - gc.collect() - return load_gpu - - -# snow: webm 템플릿을 ratio 별로 resize 하여 저장하는 함수 -def save_template_webm_ratio(template_video_path, ratio, out_path, verbose): - def resize_(size, img): - w, h = size - img = cv2.resize(img, (w, h), inter_alg_(w, h, img)) - return img - - def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - os.makedirs(os.path.dirname(out_path), exist_ok=True) - reader, meta = get_four_channel_ffmpeg_reader(template_video_path) - if Path(out_path).exists(): - if verbose: - print(f"ratio 파일이 저장되어 있음, {out_path}") - return - - if verbose: - print(f"webm ratio template, org:{template_video_path}, ratio:{ratio}") - size_org = meta["size"] - size = list(int(round(ratio * v)) // 2 * 2 for v in size_org) - writer = get_webm_ffmpeg_writer( - out_path, size=size, fps=meta["fps"], wav_path=template_video_path - ) - writer.send(None) # seed the generator - - total_cnt, _ = imageio_ffmpeg.count_frames_and_secs(template_video_path) - for idx, f in tqdm( - enumerate(reader), total=total_cnt, desc=f"save webm ratio:{ratio}, size:{size}" - ): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(size_org[1], size_org[0], 4) - f = resize_(size, f) - writer.send(f) # seed the generator - writer.close() - - -# hojin -# png frame 추출 + crop -def save_template_frames( - template_video_path, - template_frames_path, - template_video_path_with_ratio=None, - ratio=1.0, - save_in_video=False, - verbose=False, -): - def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - def resize_(size, img): - w, h = size - img = cv2.resize(img, (w, h), inter_alg_(w, h, img)) - return img - - # hojin: 템플릿을 프레임별로 저장해두기 -> write_video_in_thread에서 reader 사용하지 않기 위함 - if save_in_video == False: - if Path(template_frames_path).exists(): - if verbose: - print("프레임이 모두 저장되어 있음") - return - else: - if Path(template_video_path_with_ratio).exists(): - if verbose: - print("비디오가 생성되어 있음") - return - os.makedirs(os.path.dirname(template_video_path_with_ratio), exist_ok=True) - - if template_video_path.endswith(".mov") or template_video_path.endswith(".webm"): - reader, meta = 
get_four_channel_ffmpeg_reader(template_video_path) - else: # mp4 - reader, meta = get_three_channel_ffmpeg_reader(template_video_path) - size_org = meta["size"] - size = list(int(round(ratio * v)) // 2 * 2 for v in size_org) - fps = meta["fps"] - if verbose: - print(meta) - - total_cnt, _ = imageio_ffmpeg.count_frames_and_secs(template_video_path) - - if save_in_video is False: - Path(template_frames_path).mkdir(exist_ok=True, parents=True) - - # hojin: 추출한 프레임을 내보내기를 위해서 다시 mov로 만들어놓기 (ratio<1.0) - writer = None - - if verbose: - print("template_frames_path: ", template_frames_path) - for idx, f in tqdm( - enumerate(reader), - total=total_cnt, - desc=f"save frames f{ratio}" - if save_in_video is False - else f"save video f{ratio}", - ): - name = f"""{idx:05d}.webp""" - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape( - size_org[1], size_org[0], 3 if template_video_path.endswith(".mp4") else 4 - ) - f = resize_(size, f) - if save_in_video is False: - f = np.ascontiguousarray(f) - f = Image.fromarray( - f, mode="RGB" if template_video_path.endswith(".mp4") else "RGBA" - ) - f.save( - str(Path(template_frames_path) / str(name)), format="png", lossless=True - ) - # cv2.imwrite(str(Path(template_frames_path) / str(name)), f[:, :, [2, 1, 0, 3]], [int(cv2.IMWRITE_PNG_COMPRESSION), 3]) - - if writer is None and save_in_video is True: - if ratio != 1.0: - writer = imageio_ffmpeg.write_frames( - template_video_path_with_ratio, - size=size, - fps=fps, - quality=10, - pix_fmt_in="rgba", - pix_fmt_out="rgba", - codec="png", - macro_block_size=1, - ) - writer.send(None) - - if writer: - writer.send(f) - - if writer: - writer.close() - - -# hojin end diff --git a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/template-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/template-checkpoint.py deleted file mode 100644 index 2b12642423ccc801e0e363e7d1c779c594273265..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/template-checkpoint.py +++ /dev/null @@ -1,419 +0,0 @@ -import asyncio -import json -from pathlib import Path - -import asyncstdlib -import numpy as np -import pandas as pd -from pydub import AudioSegment - -from stf_alternative.compose import get_compose_func_without_keying, get_keying_func -from stf_alternative.dataset import LipGanAudio, LipGanImage, LipGanRemoteImage -from stf_alternative.inference import ( - adictzip, - ainference_model_remote, - audio_encode, - dictzip, - get_head_box, - inference_model, - inference_model_remote, -) -from stf_alternative.preprocess_dir.utils import face_finder as ff -from stf_alternative.readers import ( - AsyncProcessPoolBatchIterator, - ProcessPoolBatchIterator, - get_image_folder_async_process_reader, - get_image_folder_process_reader, -) -from stf_alternative.util import ( - acycle, - get_crop_mp4_dir, - get_frame_dir, - get_preprocess_dir, - icycle, - read_config, -) - - -def calc_audio_std(audio_segment): - sample = np.array(audio_segment.get_array_of_samples(), dtype=np.int16) - max_value = np.iinfo( - np.int8 - if audio_segment.sample_width == 1 - else np.int16 - if audio_segment.sample_width == 2 - else np.int32 - ).max - return sample.std() / max_value, len(sample) - - -class RunningAudioNormalizer: - def __init__(self, ref_audio_segment, decay_rate=0.01): - self.ref_std, _ = calc_audio_std(ref_audio_segment) - self.running_var = np.float64(0) - self.running_cnt = 0 - self.decay_rate = decay_rate - - def __call__(self, audio_segment): - 
std, cnt = calc_audio_std(audio_segment) - self.running_var = (self.running_var + (std**2) * cnt) * (1 - self.decay_rate) - self.running_cnt = (self.running_cnt + cnt) * (1 - self.decay_rate) - - return audio_segment._spawn( - (audio_segment.get_array_of_samples() / self.std * self.ref_std) - .astype(np.int16) - .tobytes() - ) - - @property - def std(self): - return np.sqrt(self.running_var / self.running_cnt) - - -def get_video_metadata(preprocess_dir): - json_path = preprocess_dir / "metadata.json" - with open(json_path, "r") as f: - return json.load(f) - - -class Template: - def __init__( - self, - config_path, - model, - template_video_path, - wav_std=False, - ref_wav=None, - verbose=False, - ): - self.config = read_config(config_path) - self.model = model - - self.template_video_path = Path(template_video_path) - self.preprocess_dir = Path( - get_preprocess_dir(model.work_root_path, model.args.name) - ) - - self.crop_mp4_dir = Path( - get_crop_mp4_dir(self.preprocess_dir, template_video_path) - ) - self.dataset_dir = self.crop_mp4_dir / f"{Path(template_video_path).stem}_000" - - self.template_frames_path = Path( - get_frame_dir(self.preprocess_dir, template_video_path, ratio=1.0) - ) - self.verbose = verbose - self.remote = self.model.args.model_type == "remote" - - self.audio_normalizer = ( - RunningAudioNormalizer(ref_wav) if wav_std else lambda x: x - ) - self.df = pd.read_pickle(self.dataset_dir / "df_fan.pickle") - - metadata = get_video_metadata(self.preprocess_dir) - self.fps = metadata["fps"] - self.width, self.height = metadata["width"], metadata["height"] - - self.keying_func = get_keying_func(self) - self.compose_func = get_compose_func_without_keying(self, ratio=1.0) - - self.move = "move" in self.config.keys() and self.config.move - - self.inference_func = inference_model_remote if self.remote else inference_model - self.batch_size = self.model.args.batch_size - self.unit = 1000 / self.fps - - def _get_reader(self, num_skip_frames): - assert self.template_frames_path.exists() - return get_image_folder_process_reader( - data_path=self.template_frames_path, - num_skip_frames=num_skip_frames, - preload=self.batch_size, - ) - - def _get_local_face_dataset(self, num_skip_frames): - return LipGanImage( - args=self.model.args, - path=self.dataset_dir, - num_skip_frames=num_skip_frames, - ) - - def _get_remote_face_dataset(self, num_skip_frames): - return LipGanRemoteImage( - args=self.model.args, - path=self.dataset_dir, - num_skip_frames=num_skip_frames, - ) - - def _get_mel_dataset(self, audio_segment): - image_count = round( - audio_segment.duration_seconds * self.fps - ) # 패딩 했기 때문에 batch_size로 나뉜다 - ids = list(range(image_count)) - - mel = audio_encode( - model=self.model, - audio_segment=audio_segment, - device=self.model.device, - ) - - return LipGanAudio( - args=self.model.args, - id_list=ids, - mel=mel, - fps=self.fps, - ) - - def _get_face_dataset(self, num_skip_frames): - if self.remote: - return self._get_remote_face_dataset(num_skip_frames=num_skip_frames) - else: - return self._get_local_face_dataset(num_skip_frames=num_skip_frames) - - def _wrap_reader(self, reader): - reader = icycle(reader) - return reader - - def _wrap_dataset(self, dataset): - dataloader = ProcessPoolBatchIterator( - dataset=dataset, - batch_size=self.batch_size, - ) - return dataloader - - def get_reader(self, num_skip_frames=0): - reader = self._get_reader(num_skip_frames=num_skip_frames) - reader = self._wrap_reader(reader) - return reader - - def get_mel_loader(self, audio_segment): - 
mel_dataset = self._get_mel_dataset(audio_segment) - return self._wrap_dataset(mel_dataset) - - def get_face_loader(self, num_skip_frames=0): - face_dataset = self._get_face_dataset(num_skip_frames=num_skip_frames) - return self._wrap_dataset(face_dataset) # need cycle - - # padding according to batch size. - def pad(self, audio_segment): - num_frames = audio_segment.duration_seconds * self.fps - pad = AudioSegment.silent( - (self.batch_size - (num_frames % self.batch_size)) * (1000 / self.fps) - ) - return audio_segment + pad - - def _prepare_data( - self, - audio_segment, - video_start_offset_frame, - ): - video_start_offset_frame = video_start_offset_frame % len(self.df) - padded = self.pad(audio_segment) - - face_dataset = self._get_face_dataset(num_skip_frames=video_start_offset_frame) - mel_dataset = self._get_mel_dataset(audio_segment=padded) - - n_frames = len(mel_dataset) - assert n_frames % self.batch_size == 0 - - face_loader = self._wrap_dataset(face_dataset) - mel_loader = self._wrap_dataset(mel_dataset) - return padded, face_loader, mel_loader - - def gen_infer( - self, - audio_segment, - video_start_offset_frame, - ): - padded, face_loader, mel_loader = self._prepare_data( - audio_segment=audio_segment, - video_start_offset_frame=video_start_offset_frame, - ) - - for i, v in enumerate(dictzip(iter(mel_loader), iter(face_loader))): - inferred = self.inference_func(self.model, v, self.model.device) - - for j, it in enumerate(inferred): - chunk_pivot = i * self.unit * self.batch_size + j * self.unit - chunk = padded[chunk_pivot : chunk_pivot + self.unit] - yield it, chunk - - def gen_infer_batch( - self, - audio_segment, - video_start_offset_frame, - ): - padded, face_loader, mel_loader = self._prepare_data( - audio_segment=audio_segment, - video_start_offset_frame=video_start_offset_frame, - ) - - for i, v in enumerate(dictzip(iter(mel_loader), iter(face_loader))): - inferred = self.inference_func(self.model, v, self.model.device) - yield inferred, padded[ - i * self.unit * self.batch_size : (i + 1) * self.unit * self.batch_size - ] - - def gen_infer_batch_future( - self, - pool, - audio_segment, - video_start_offset_frame, - ): - padded, face_loader, mel_loader = self._prepare_data( - audio_segment=audio_segment, - video_start_offset_frame=video_start_offset_frame, - ) - - futures = [] - for i, v in enumerate(dictzip(iter(mel_loader), iter(face_loader))): - futures.append( - pool.submit(self.inference_func, self.model, v, self.model.device) - ) - - for i, future in enumerate(futures): - yield future, padded[ - i * self.unit * self.batch_size : (i + 1) * self.unit * self.batch_size - ] - - def gen_infer_concurrent( - self, - pool, - audio_segment, - video_start_offset_frame, - ): - for future, chunk in self.gen_infer_batch_future( - pool, audio_segment, video_start_offset_frame - ): - for i, inferred in enumerate(future.result()): - yield inferred, chunk[i * self.unit : (i + 1) * self.unit] - - def compose( - self, - idx, - frame, - output, - ): - head_box_idx = idx % len(self.df) - head_box = get_head_box( - self.df, - move=self.move, - head_box_idx=head_box_idx, - ) - alpha2 = self.keying_func(output, head_box_idx, head_box) - frame = self.compose_func(alpha2, frame[:, :, :4], head_box_idx) - return frame - - def gen_frames( - self, - audio_segment, - video_start_offset_frame, - reader=None, - ): - reader = reader or self.get_reader(num_skip_frames=video_start_offset_frame) - gen_infer = self.gen_infer(audio_segment, video_start_offset_frame) - - for idx, ((o, a), f) in 
enumerate( - zip(gen_infer, reader), video_start_offset_frame - ): - composed = self.compose(idx, f, o) - yield composed, a - - def gen_frames_concurrent( - self, - pool, - audio_segment, - video_start_offset_frame, - reader=None, - ): - reader = reader or self.get_reader(num_skip_frames=video_start_offset_frame) - gen_infer = self.gen_infer_concurrent( - pool, - audio_segment, - video_start_offset_frame, - ) - - for idx, ((o, a), f) in enumerate( - zip(gen_infer, reader), video_start_offset_frame - ): - yield self.compose(idx, f, o), a - - -class AsyncTemplate(Template): - async def agen_infer_batch_future( - self, - pool, - audio_segment, - video_start_offset_frame, - ): - assert self.remote - - padded, face_loader, mel_loader = await self._aprepare_data( - pool, - audio_segment=audio_segment, - video_start_offset_frame=video_start_offset_frame, - ) - - futures = [] - async for i, v in asyncstdlib.enumerate( - adictzip(aiter(mel_loader), aiter(face_loader)) - ): - futures.append( - asyncio.create_task( - ainference_model_remote(pool, self.model, v, self.model.device) - ) - ) - - for i, future in enumerate(futures): - yield future, padded[ - i * self.unit * self.batch_size : (i + 1) * self.unit * self.batch_size - ] - - async def _awrap_dataset(self, dataset): - dataloader = AsyncProcessPoolBatchIterator( - dataset=dataset, - batch_size=self.batch_size, - ) - return dataloader - - async def _aprepare_data( - self, - pool, - audio_segment, - video_start_offset_frame, - ): - video_start_offset_frame = video_start_offset_frame % len(self.df) - padded = self.pad(audio_segment) - - loop = asyncio.get_running_loop() - - face_dataset, mel_dataset = await asyncio.gather( - loop.run_in_executor( - pool, self._get_face_dataset, video_start_offset_frame - ), - loop.run_in_executor(pool, self._get_mel_dataset, padded), - ) - - n_frames = len(mel_dataset) - assert n_frames % self.batch_size == 0 - - face_loader = await self._awrap_dataset(face_dataset) - mel_loader = await self._awrap_dataset(mel_dataset) - return padded, face_loader, mel_loader - - def _aget_reader(self, num_skip_frames): - assert self.template_frames_path.exists() - return get_image_folder_async_process_reader( - data_path=self.template_frames_path, - num_skip_frames=num_skip_frames, - preload=self.batch_size, - ) - - def _awrap_reader(self, reader): - reader = acycle(reader) - return reader - - def aget_reader(self, num_skip_frames=0): - reader = self._aget_reader(num_skip_frames=num_skip_frames) - reader = self._awrap_reader(reader) - return reader diff --git a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/util-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/util-checkpoint.py deleted file mode 100644 index e1cd2ea57885324d07a2f282b46724e714ce649f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/.ipynb_checkpoints/util-checkpoint.py +++ /dev/null @@ -1,276 +0,0 @@ -import json -import random -import string -from datetime import datetime -from pathlib import Path - -import ffmpeg -import imageio_ffmpeg -import numpy as np -import torch -from addict import Dict - - -def icycle(iterable): - while True: - for it in iterable: - yield it - - -async def acycle(aiterable): - while True: - async for it in aiterable: - yield it - - -def read_config(config_path): - try: - with open(config_path) as fd: - conf = json.load(fd) - conf = Dict(conf) - except Exception as e: - print("read config exception in ", config_path) - raise e - return conf - - -def 
get_preprocess_dir(work_root_path, name): - return str(Path(work_root_path) / "preprocess" / name) - - -def get_crop_mp4_dir(preprocess_dir, video_path): - return f"{preprocess_dir}/crop_video_{Path(video_path).stem}" - - -def get_frame_dir(preprocess_dir, video_path, ratio): - ratio_s = "" if ratio == 1.0 else f"_{ratio}" - return f"{preprocess_dir}/{Path(video_path).stem}/frames{ratio_s}" - - -def get_template_ratio_file_path(preprocess_dir, video_path, ratio): - if ratio == 1.0: - return video_path - - root_path = f"{preprocess_dir}/{Path(video_path).name}" - return f"{root_path}/{Path(video_path).name}_ratio_{ratio}{Path(video_path).suffix}" - - -class _CallBack(object): - def __init__(self, callback, min_per, max_per, desc, verbose=False): - assert max_per > min_per - self.callback = callback - self.min_per = min_per - self.max_per = max_per - if isinstance(callback, _CallBack): - self.desc = callback.desc + "/" + desc - else: - self.desc = desc - self.last_per = -1 - self.verbose = verbose - self.callback_interval = 1 - - def __call__(self, per): - if self.callback is None: - return - my_per = self.min_per + (per + 1) / 100.0 * (self.max_per - self.min_per) - my_per = int(my_per) - if my_per - self.last_per >= self.callback_interval: - # if self.verbose: - # print(self.desc, ' : ', my_per) - self.callback(my_per) - self.last_per = my_per - - -def callback_inter(callback, min_per=0, max_per=100, desc="", verbose=False): - assert min_per >= 0 and max_per >= 0 and max_per > min_per - return _CallBack(callback, min_per, max_per, desc, verbose=verbose) - - -def callback_test(): - def callback(per): - print("real callback", per) - - callback1 = callback_inter(callback, min_per=0, max_per=50, desc="1") - callback2 = callback_inter(callback, min_per=50, max_per=90, desc="2") - callback3 = callback_inter(callback, min_per=90, max_per=100, desc="3") - # for i in range(0,101,10): - # callback1(i) - - callback11 = callback_inter(callback1, min_per=0, max_per=20, desc="a") - callback12 = callback_inter(callback1, min_per=20, max_per=80, desc="b") - callback13 = callback_inter(callback1, min_per=80, max_per=100, desc="c") - - for i in range(0, 101, 1): - callback11(i) - for i in range(0, 101, 1): - callback12(i) - for i in range(0, 101, 1): - callback13(i) - - for i in range(0, 101, 1): - callback2(i) - for i in range(0, 101, 1): - callback3(i) - - -def fix_seed(random_seed): - """ - fix seed to control any randomness from a code - (enable stability of the experiments' results.) - """ - torch.manual_seed(random_seed) - torch.cuda.manual_seed(random_seed) - torch.cuda.manual_seed_all(random_seed) # if use multi-GPU - torch.backends.cudnn.deterministic = True - torch.backends.cudnn.benchmark = False - np.random.seed(random_seed) - random.seed(random_seed) - - -def seed_worker(worker_id): - worker_seed = torch.initial_seed() % 2**32 - np.random.seed(worker_seed) - random.seed(worker_seed) - - -def get_three_channel_ffmpeg_reader(path): - reader = imageio_ffmpeg.read_frames(path) - meta = reader.__next__() # meta data, e.g. 
meta["size"] -> (width, height) - return reader, meta - - -def get_four_channel_ffmpeg_reader(path): - if path.endswith(".mov"): - reader = imageio_ffmpeg.read_frames( - str(path), pix_fmt="rgba", bits_per_pixel=32 - ) - elif path.endswith(".webm"): - stream_meta = [ - it - for it in ffmpeg.probe(str(path))["streams"] - if it["codec_type"] == "video" - ][0] - reader = imageio_ffmpeg.read_frames( - path=str(path), - pix_fmt="rgba", - input_params=["-c:v", "libvpx-vp9"] - if stream_meta["codec_name"] == "vp9" - else ["-c:v", "libvpx"], - bits_per_pixel=32, - ) - - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - return reader, meta - - -def get_three_channel_ffmpeg_writer(out_path, size, fps, ffmpeg_params, wav_path): - writer = imageio_ffmpeg.write_frames( - out_path, - size=size, - fps=fps, - ffmpeg_log_level="error", - quality=10, # 0~10 - output_params=ffmpeg_params, - audio_path=wav_path, - macro_block_size=1, - ) - return writer - - -def get_webm_ffmpeg_writer(out_path, size, fps, wav_path, low_quality=False): - writer = imageio_ffmpeg.write_frames( - out_path, - size=size, - fps=fps / 2 if low_quality else fps, - ffmpeg_log_level="error", - quality=10, # 0~10 - # hojin - pix_fmt_in="rgba", - pix_fmt_out="yuva420p", - codec="libvpx", - bitrate="10M", - output_params=["-crf", "4", "-auto-alt-ref", "0"] - + (["-deadline", "realtime"] if low_quality else []), - # output_params=['-b','37800k', '-vf', 'hflip'], # 좌우 반전 테스트 (완료) - # hojin end - audio_path=wav_path, - macro_block_size=1, - ) - return writer - - -def get_mov_ffmpeg_writer(out_path, size, fps, wav_path): - writer = imageio_ffmpeg.write_frames( - out_path, - size=size, - fps=fps, - ffmpeg_log_level="error", - quality=10, # 0~10 - pix_fmt_in="rgba", - pix_fmt_out="yuva444p10le", - # codec="prores_ks", - output_params=[ - "-c:v", - "prores_ks", - "-profile:v", - "4", - "-vendor", - "apl0", - "-bits_per_mb", - "8000", - ], - audio_path=wav_path, - macro_block_size=1, - ) - return writer - - -def get_reader(template_video_path): - # document : https://github.com/imageio/imageio-ffmpeg - if template_video_path.endswith(".mp4"): - reader, meta = get_three_channel_ffmpeg_reader(template_video_path) - elif template_video_path.endswith(".mov") or template_video_path.endswith(".webm"): - reader, meta = get_four_channel_ffmpeg_reader(template_video_path) - else: - assert False - return reader, meta - - -def get_writer(out_path, size, fps, wav_path, slow_write): - if out_path.endswith(".mp4"): - # 합성하면서 비디오 생성 - ffmpeg_params = None - if slow_write: - # ffmpeg_params=['-acodec', 'aac', '-preset', 'veryslow', '-crf', '17'] - ffmpeg_params = ["-acodec", "aac", "-crf", "17"] - writer = get_three_channel_ffmpeg_writer( - out_path, size, fps, ffmpeg_params, wav_path - ) - elif out_path.endswith(".mov"): - writer = get_mov_ffmpeg_writer(out_path, size, fps, wav_path) - elif out_path.endswith(".webm"): - writer = get_webm_ffmpeg_writer( - out_path, size, fps, wav_path - ) # webm fps 변경한다.(속도를 위해) - else: - print('out_path should one of ["mp4", "webm"]') - assert False - return writer - - -def pretty_string_dict(d, tab=4): - s = ["{\n"] - for k, v in d.items(): - if isinstance(v, dict): - v = pretty_string_dict(v, tab + 1) - else: - v = repr(v) - - s.append("%s%r: %s,\n" % (" " * tab, k, v)) - s.append("%s}" % (" " * tab)) - return "".join(s) - - -def get_random_string_with_len(size: int): - time_str = datetime.now().strftime("%y%m%d_%H%M%S_") - return "".join([time_str] + random.choices(string.ascii_letters, k=size)) 
diff --git a/stf/stf-api-alternative/src/stf_alternative/__init__.py b/stf/stf-api-alternative/src/stf_alternative/__init__.py deleted file mode 100644 index 895641a6947a007e57c5ae2c0faa8f5257a18392..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .model import create_model -from .preprocess import preprocess_template -from .template import AsyncTemplate, Template diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/__init__.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 0669acbcbd90def1cfc7c4afa63e58c0d6dc864f..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/compose.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/compose.cpython-310.pyc deleted file mode 100644 index 1a22f82c921c719faeca20ebbcb90fa883f90c68..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/compose.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/dataset.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/dataset.cpython-310.pyc deleted file mode 100644 index 3e0f563924d577968d626e906e35092e1e335054..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/dataset.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/inference.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/inference.cpython-310.pyc deleted file mode 100644 index d71c1ce101942b38feb301ac04f31584957bebc0..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/inference.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/model.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/model.cpython-310.pyc deleted file mode 100644 index 9970981097c5833bae4d853c06b5beb4dd664aac..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/model.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/preprocess.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/preprocess.cpython-310.pyc deleted file mode 100644 index 3fb2f66345e53b8251885966054b3b302685f361..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/preprocess.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/readers.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/readers.cpython-310.pyc deleted file mode 100644 index da97d00ebfecd5b97a43987e2eac8a547c525030..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/readers.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/template.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/template.cpython-310.pyc deleted file mode 100644 index 
67367b8c979b6eef1ea83076ab4c3275b36762ce..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/template.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/__pycache__/util.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/__pycache__/util.cpython-310.pyc deleted file mode 100644 index 84e18b99a1fce5ff9e5cb391055886e61a51ad1f..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/__pycache__/util.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/compose.py b/stf/stf-api-alternative/src/stf_alternative/compose.py deleted file mode 100644 index 61d7d3f5d4918dc231e7e6544b8625d88a72cd1c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/compose.py +++ /dev/null @@ -1,675 +0,0 @@ -from pathlib import Path - -import cv2 -import numpy as np -import pandas as pd - - -def maskblur(mask, kernel_size, sigma=1): - mask_blur = cv2.GaussianBlur(mask, (kernel_size, kernel_size), sigma) - return mask_blur - - -def erosion(mask, kernel_size): - kernel = np.ones((kernel_size, kernel_size), np.uint8) - erosion_image = cv2.erode(mask, kernel, iterations=1) # // make erosion image - return erosion_image - - -def dilate(mask, kernel_size): - kernel = np.ones((kernel_size, kernel_size), np.uint8) - erosion_image = cv2.dilate(mask, kernel, iterations=1) # // make erosion image - return erosion_image - - -def resize_adapt(model_out, crop_region): - def inter_alg(target_size, img): - if isinstance(target_size, tuple): - w, h = target_size - else: - w, h = target_size, target_size - return inter_alg_(w, h, img) - - def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - x1, y1, x2, y2 = crop_region - h, w = y2 - y1 + 1, x2 - x1 + 1 - sz = model_out.shape[0] # h,w 동일하다. 
- if h == sz and w == sz: - return model_out - - r = max(h, w) / sz - max_hw = max(h, w) - temp_ = cv2.resize(model_out, (max_hw, max_hw), inter_alg(max_hw, model_out)) - temp_ = temp_[ - (max_hw - h) // 2 : (max_hw - h) // 2 + h, - (max_hw - w) // 2 : (max_hw - w) // 2 + w, - ] - return temp_ - - -def get_face_mask( - img_size, df_fan_row, blur_ratio=0.3, dilate_ratio=0.2, erosion_ratio=0 -): - assert blur_ratio >= 0 and blur_ratio <= 1 - assert erosion_ratio >= 0 and erosion_ratio <= 1 - assert dilate_ratio >= 0 and dilate_ratio <= 1 - - def _masking(img, pts, value): - img = cv2.fillPoly(img, [pts], value) - return img - - def _get_face_pts_n_box(img_size, df_fan_row): - box = df_fan_row["cropped_box"] - pts2d = df_fan_row["pts2d"] - np.array([box[0], box[1]]) - - if isinstance(df_fan_row["cropped_size"], float): - cropped_size = df_fan_row["cropped_size"] - else: - cropped_size = df_fan_row["cropped_size"][0] - ratio = img_size[0] / cropped_size - pts2d = pts2d * ratio - xs, ys = pts2d[:, 0], pts2d[:, 1] - l, t, r, b = min(xs), min(ys), max(xs), max(ys) - return np.concatenate([pts2d[0:17, :], pts2d[17:27, :][::-1]]).astype( - np.int32 - ), (l, t, r, b) - - if df_fan_row["pts2d"] is None: - mask = np.zeros((img_size[1], img_size[0]), dtype=np.uint8) - if len(mask.shape) == 2: - mask = np.expand_dims(mask, axis=2) - return {"crop": mask, "origin": 1 - mask} - - pts, box = _get_face_pts_n_box(img_size, df_fan_row) - h = max(box[2] - box[0], box[3] - box[1]) - mask = np.zeros((img_size[1], img_size[0]), dtype=np.uint8) - mask = _masking(mask, pts, (255)) - if dilate_ratio != 0: - mask = dilate(mask, int(h * dilate_ratio) // 2 * 2 + 1) - if erosion_ratio != 0: - mask = erosion(mask, int(h * erosion_ratio) // 2 * 2 + 1) - if blur_ratio != 0: - blur_kernel_size = int(h * blur_ratio) // 2 * 2 + 1 - mask = maskblur(mask, blur_kernel_size, 0) - mask = mask / 255 - if len(mask.shape) == 2: - mask = np.expand_dims(mask, axis=2) - return {"crop": mask, "origin": 1 - mask} - - -def cromakey_green(img): - r = img[:, :, 0] - g = img[:, :, 1] - b = img[:, :, 2] - g_alpha = g > 50 - r_alpha = (g * 1.0) > r - b_alpha = (g * 0.7) > b - alpha = g_alpha & (r_alpha & b_alpha) - alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=13) - alpha[np.where(alpha > 100)] = 255 - alpha = erosion(alpha, kernel_size=5) - - if len(alpha.shape) == 2: - alpha2 = np.expand_dims(alpha, axis=2) - else: - alpha2 = alpha - new = np.concatenate((img, alpha2), axis=2) - return new - - -def cromakey_green_binary(img): - img = cromakey_green(img) - alpha = img[:, :, 3] - alpha[np.where(alpha <= 128)] = 0 - alpha[np.where(alpha > 128)] = 1 - - -def cromakey_green_hunet_lmy(img): - r = img[:, :, 0] - g = img[:, :, 1] - b = img[:, :, 2] - g_alpha = g > 70 - r_alpha = g > r - b_alpha = (g * 0.8) > b - alpha = g_alpha & (r_alpha & b_alpha) - alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=11) - alpha[np.where(alpha > 100)] = 255 - alpha = maskblur(alpha, kernel_size=3) - alpha = erosion(alpha, kernel_size=3) - - if len(alpha.shape) == 2: - alpha2 = np.expand_dims(alpha, axis=2) - else: - alpha2 = alpha - new = np.concatenate((img, alpha2), axis=2) - return new - - -# ybm 영상용 크로마키 함수 -def cromakey_green_ybm_front(img): - r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2] - - g_alpha = g > 70 - # r_alpha = (g * 0.7) > r - # b_alpha = (g * 0.7) > b - r_alpha = g > r - b_alpha = (g * 0.9) > b - - alpha = g_alpha & (r_alpha & b_alpha) - alpha 
= (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=11) - alpha[np.where(alpha > 100)] = 255 - alpha = maskblur(alpha, kernel_size=3) - alpha = maskblur(alpha, kernel_size=3) - - grey_alpha = alpha < 255 - g[grey_alpha] = r[grey_alpha] * 0.8 - - if len(alpha.shape) == 2: - alpha2 = np.expand_dims(alpha, axis=2) - else: - alpha2 = alpha - new = np.concatenate((img, alpha2), axis=2) - return new - - -# ybm 영상용 크로마키 함수 -def cromakey_green_ybm_side(img): - img = img.copy() - r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2] - g_alpha = g > 50 - r_alpha = g > r - b_alpha = (g * 0.9) > b - - alpha = g_alpha & (r_alpha & b_alpha) - alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=11) - alpha[np.where(alpha > 100)] = 255 - alpha = maskblur(alpha, kernel_size=3) - alpha = maskblur(alpha, kernel_size=3) - - grey_alpha = alpha < 255 - g[grey_alpha] = r[grey_alpha] * 0.8 - - if len(alpha.shape) == 2: - alpha2 = np.expand_dims(alpha, axis=2) - else: - alpha2 = alpha - new = np.concatenate((img, alpha2), axis=2) - return new - - -# devin 영상용 크로마키 함수 -def cromakey_green_devin_side(img): - img = img.copy() - r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2] - g_alpha = g > 70 - r_alpha = (g * 0.8) > r - # r_alpha = g > r - b_alpha = (g * 0.9) > b - - alpha = g_alpha & (r_alpha & b_alpha) - alpha = (1 - alpha) * 255 - alpha = alpha.astype(np.uint8) - - alpha = maskblur(alpha, kernel_size=7, sigma=3) - alpha[np.where(alpha < 150)] = 0 - alpha = maskblur(alpha, kernel_size=5, sigma=2) - - if len(alpha.shape) == 2: - alpha = np.expand_dims(alpha, axis=2) - - new = np.concatenate((img, alpha), axis=2) - return new - - -def get_cromakey_func(args): - if "cromakey" not in args.keys(): - return cromakey_green_hunet_lmy - if "cromakey_green_ybm_front" == args.cromakey: - return cromakey_green_ybm_front - if "cromakey_green_ybm_side" == args.cromakey: - return cromakey_green_ybm_side - if "cromakey_green_devin_side" == args.cromakey: - return cromakey_green_devin_side - - raise "cromakey not found" - - -def compose_default_(model_out, org_image_with_alpha, mask, **kwargs): - # 1. 마스크 섞기 : 원래 비디오의 투명값과 계산한 마스크를 섞는다. - mask = mask[:, :, 0] - mask[np.where(mask > 0)] = 1 # 마스크 영역을 128 -> 1 로 만든다. - model_out[:, :, 3] = ( - org_image_with_alpha[:, :, 3] * (1 - mask) + model_out[:, :, 3] * mask - ) - - # 2. 섞인 마스크가 좀 자연스럽게 섞이도록 함. - model_out[:, :, 3] = maskblur(model_out[:, :, 3], kernel_size=3, sigma=1) - return model_out - - -def compose_devin_(model_out, org_image_with_alpha, mask, debug=False, **kwargs): - mask = mask[:, :, 0] - mask[np.where(mask > 0)] = 1 # 마스크 영역을 128 -> 1 로 만든다. - mask = mask.astype(np.float32) - - # 1. 기존마스크와 경계가 잘 안보이도록 마스크를 부드럽게 만든다. - kernel_size = int(mask.shape[0] * 0.03) // 2 * 2 + 1 # 이미지 크기의 3% 정도 마스크를 확장한다. - if debug: - print( - f"## compose_devin_: kernel_size:{kernel_size}, mask_height:{mask.shape[0]}" - ) - if kernel_size >= 3: - mask = dilate(mask, kernel_size=kernel_size) - mask = maskblur(mask, kernel_size=kernel_size, sigma=kernel_size // 2) - mask = maskblur(mask, kernel_size=kernel_size, sigma=kernel_size // 2) - mask = erosion(mask, kernel_size=3) # 1pixel 만 줄임 - - # 2. 마스크 섞기 : 원래 비디오의 투명값과 계산한 마스크를 섞는다. - model_out[:, :, 3] = ( - org_image_with_alpha[:, :, 3] * (1 - mask) + model_out[:, :, 3] * mask - ) - - # 3. 섞인 마스크가 부드럽게 한번더 블러를 한다. 
- model_out[:, :, 3] = maskblur(model_out[:, :, 3], kernel_size=3, sigma=1) - - return model_out - - -def get_compose_mask_func(args): - if "cromakey" in args.keys(): - if "cromakey_green_devin_side" == args.cromakey: - return compose_devin_ - if "compose" in args.keys(): - if "compose_smooth" == args.compose: - return compose_devin_ - return compose_default_ - - -def get_keying_func(template): - cromakey_func = get_cromakey_func(template.model.args) - compose_func = get_compose_mask_func(template.model.args) - - def keying_(pred, idx, box=None): - model_out, mask, alpha = pred["pred"], pred["mask"], pred["img_gt_with_alpha"] - - if pred["filename"].endswith("_no.jpg") or pred["filename"].endswith("_no.png"): - return alpha[:, :, [2, 1, 0, 3]] - - if ( - alpha.shape[0] != mask.shape[0] - or alpha.shape[1] != mask.shape[1] - or alpha.shape[0] != model_out.shape[0] - or alpha.shape[1] != model_out.shape[1] - ): - raise Exception( - f"not matched keying shape. " - f"alpha: {alpha.shape[0]}, {alpha.shape[1]}, {alpha.shape[2]}, " - f"mask: {mask.shape[0]}, {mask.shape[1]}, " - f"model_out: {model_out.shape[0]}, {model_out.shape[1]}" - ) - - if box is not None: - model_h = model_out.shape[0] - box_h = box[3] - box[1] - if box_h > model_h: - model_out = resize_adapt(model_out, box) - mask = resize_adapt(mask, box) - alpha = resize_adapt(alpha, box) - model_out = cromakey_func(model_out) - model_out = compose_func( - model_out=model_out, org_image_with_alpha=alpha, mask=mask - ) - - return model_out - - return keying_ - - -def get_box_mask(width, height, config, verbose=False): - def get_mask_( - width, height, gradation_width, gradation_bottom=None, box_mask_erosion=None - ): - mask = np.ones((height, width, 1)) - r = list(range(0, gradation_width, 1)) - for s, e in zip(r, r[1:]): - g = s / gradation_width - # print(f'---- s:{s}, e:{e}, g:{g}') - mask[s:e, s : width - s, :] = g - mask[height - e : height - s, s : width - s, :] = g - mask[s : height - s, s:e, :] = g - mask[s : height - s, width - e : width - s, :] = g - if gradation_bottom is not None: - r = list(range(0, gradation_bottom, 1)) - for s, e in zip(r, r[1:]): - g = s / gradation_bottom - mask[height - e : height - s, s : width - s, :] = g - if box_mask_erosion is not None: - mask = erosion(mask, box_mask_erosion * 2 + 1) - if len(mask.shape) == 2: - mask = np.expand_dims(mask, 2) # mask shape ex: (352,352,1) - - return mask - - gradation_width = int(height * 0.1) - gradation_bottom = ( - int(height * config["gradation_bottom"]) - if "gradation_bottom" in config.keys() - else None - ) - box_mask_erosion = ( - int(height * config["box_mask_erosion"]) - if "box_mask_erosion" in config.keys() - else None - ) - # if verbose: - # print('gradation_width : ', gradation_width) - # print('gradation_bottom : ', gradation_bottom) - # print('box_mask_erosion : ', box_mask_erosion) - mask = get_mask_(width, height, gradation_width, gradation_bottom, box_mask_erosion) - mask_crop = mask - mask_origin = 1 - mask - return {"crop": mask_crop, "origin": mask_origin} - - -def get_compose_func_without_keying_move(template, ratio, verbose=False): - args = template.model.args - df = pd.read_pickle( - f"{template.crop_mp4_dir}/{Path(template.template_video_path).stem}_000/df_fan.pickle" - ) - df = df.set_index("frame_idx") - move_head_box_size = ( - (df.loc[0]["cropped_box"][2] - df.loc[0]["cropped_box"][0] - 20) // 10 * 10 - ) - - def resize_and_scale(model_out, head_box_idx): - # ratio 1.0 에 맞는 크기로 resize 하고, - # 원래 영상에서 10의 배수에 해당하는 위치로 (head_box, 
model_out) 모두 잘라낸다. - head_box = df["cropped_box"][head_box_idx] - if ratio == 1.0: - return model_out, head_box - - # 일단 원래 크기로 만든다. - model_out = resize_adapt(model_out, head_box) - - # 원래 크기에서의 박스에서 10의 배수에 해당하는 좌표를 찾는다. - l, t = (np.array(head_box[:2]) + 9) // 10 * 10 - new_head_box = np.array( - [l, t, l + move_head_box_size - 1, t + move_head_box_size - 1] - ) # 양쪽포함이라서 1을 빼준다. - - # 10의 배수에 맞춰서 이미지를 잘라낸다. - diff_box = new_head_box - head_box - new_model_out = model_out[diff_box[1] : diff_box[3], diff_box[0] : diff_box[2]] - # if verbose and head_box_idx == 0: - # print('org head_box:', head_box, ', new_head_box:', new_head_box) - # print('alpah2.shape:', model_out.shape, ', new_model_out:', new_model_out.shape) - if ( - new_model_out.shape[0] % 10 != 0 or new_model_out.shape[1] % 10 != 0 - ): # 크기는 10의 배수여야 한다. - raise Exception(f"new_model_out.shape % 10 != 0, {new_model_out.shape}") - - # ratio에 맞는 크기로 변경한다. - x1, y1, _, _ = np.round(new_head_box * ratio).astype(np.uint8) - # 양쪽포함이라서 -1을 해준다. - new_head_box = ( - x1, - y1, - x1 + int(move_head_box_size * ratio) - 1, - y1 + int(move_head_box_size * ratio) - 1, - ) - new_model_out = resize_adapt(new_model_out, new_head_box) - - # if verbose and head_box_idx == 0: - # print('org head_box:', head_box, ', new_head_box:', new_head_box) - # print('alpah2.shape:', model_out.shape, ', new_model_out:', new_model_out.shape) - - return new_model_out, new_head_box - - def compose_one(model_out, full_img, head_box_idx): - model_out, box = resize_and_scale(model_out, head_box_idx) - x1, y1, x2, y2 = box - img = resize_adapt(model_out, (x1, y1, x2, y2)) - if ( - "compose" in template.config.keys() - and template.config.compose == "face_only" - ): - row = df.loc[head_box_idx] - mask_box = get_face_mask( - (img.shape[1], img.shape[0]), row, **get_compose_option(template.config) - ) - else: - mask_box = get_box_mask( - x2 - x1 + 1, y2 - y1 + 1, config=args, verbose=verbose - ) - - if y2 - y1 + 1 != img.shape[0] or x2 - x1 + 1 != img.shape[1]: - raise Exception( - f"not matched compose shape. x2-x1+1: {x2 - x1 + 1}, y2-y1+1:{y2 - y1 + 1}, img: {img.shape[1]}, {img.shape[0]}" - ) - - # Compose the image - if full_img.shape[2] == 3: - alpha = np.zeros_like(full_img[:, :, :1]) - alpha.fill(255) - full_img = np.concatenate([full_img, alpha], axis=2) - - out_memory = full_img.copy() - - alpha = img[:, :, 3] - alpha = cv2.merge([alpha, alpha, alpha]) - - back = out_memory[y1 : y2 + 1, x1 : x2 + 1].copy() - front = img[:, :, 0:3] - - img = np.concatenate( - [np.where(alpha < (255, 255, 255), back[:, :, :3], front), back[:, :, 3:]], - axis=2, - ) - out_memory[y1 : y2 + 1, x1 : x2 + 1] = ( - full_img[y1 : y2 + 1, x1 : x2 + 1] * mask_box["origin"] - + img * mask_box["crop"] - ) - return out_memory - - return compose_one - - -def get_compose_func_without_keying_default(template, ratio, verbose=False): - args = template.model.args - df = pd.read_pickle( - f"{template.crop_mp4_dir}/{Path(template.template_video_path).stem}_000/df_fan.pickle" - ) - # sz = df['cropped_size'].values[0] - # 원래 4k 템플릿에서 축소된 비율만큼 cropped_box 크기를 줄여준다. 
- x1, y1, x2, y2 = np.round(np.array(df["cropped_box"].values[0]) * ratio).astype( - np.uint8 - ) - del df - mask_box = get_box_mask(x2 - x1 + 1, y2 - y1 + 1, config=args, verbose=verbose) - img_size = args.img_size - if verbose: - print("croped size: ", x2 - x1 + 1, y2 - y1 + 1) - print("croped region(x1,y1,x2,y2): ", x1, y1, x2, y2) - - def compose_one(model_out, full_img, _): - img = resize_adapt(model_out, (x1, y1, x2, y2)) - if y2 - y1 + 1 != img.shape[0] or x2 - x1 + 1 != img.shape[1]: - raise Exception( - f"not matched compose shape. x2-x1+1: {x2 - x1 + 1}, y2-y1+1:{y2 - y1 + 1}, img: {img.shape[1]}, {img.shape[0]}" - ) - - # 붙여넣기 - if full_img.shape[2] == 3: - alpha = np.zeros_like(full_img[:, :, :1]) - alpha.fill(255) - full_img = np.concatenate([full_img, alpha], axis=2) - - out_memory = full_img.copy() - - alpha = img[:, :, 3] - alpha = cv2.merge([alpha, alpha, alpha]) - - back = out_memory[y1 : y2 + 1, x1 : x2 + 1].copy() - front = img[:, :, 0:3] - - img = np.concatenate( - [np.where(alpha < (255, 255, 255), back[:, :, :3], front), back[:, :, 3:]], - axis=2, - ) - - out_memory[y1 : y2 + 1, x1 : x2 + 1] = ( - full_img[y1 : y2 + 1, x1 : x2 + 1] * mask_box["origin"] - + img * mask_box["crop"] - ) - return out_memory - - return compose_one - - -def get_compose_option(config): - blur_ratio = 0.3 - dilate_ratio = 0.2 - erosion_ratio = 0.0 - if "compose_args" in config.keys(): - if "blur_ratio" in config.compose_args.keys(): - blur_ratio = config.compose_args.blur_ratio - if "dilate_ratio" in config.compose_args.keys(): - dilate_ratio = config.compose_args.dilate_ratio - if "erosion_ratio" in config.compose_args.keys(): - erosion_ratio = config.compose_args.erosion_ratio - return { - "blur_ratio": blur_ratio, - "dilate_ratio": dilate_ratio, - "erosion_ratio": erosion_ratio, - } - - -def get_compose_func_without_keying_face_only(template, ratio, verbose=False): - df = pd.read_pickle( - f"{template.crop_mp4_dir}/{Path(template.template_video_path).stem}_000/df_fan.pickle" - ) - x1, y1, x2, y2 = np.round(np.array(df["cropped_box"].values[0]) * ratio).astype( - np.uint8 - ) - - df = df.set_index("frame_idx") - if verbose: - print("get_compose_option") - print(get_compose_option(template.config)) - - def compose_one(model_out, full_img, head_box_idx): - try: - row = df.loc[head_box_idx] - except Exception as e: - print("exception get_compose_func_without_keying_face_only", e) - raise Exception("exception get_compose_func_without_keying_face_only", e) - img = resize_adapt(model_out, (x1, y1, x2, y2)) - if y2 - y1 + 1 != img.shape[0] or x2 - x1 + 1 != img.shape[1]: - raise Exception( - f"not matched compose shape. x2-x1+1: {x2 - x1 + 1}, y2-y1+1:{y2 - y1 + 1}, img: {img.shape[1]}, {img.shape[0]}" - ) - - mask_box = get_face_mask( - (img.shape[1], img.shape[0]), row, **get_compose_option(template.config) - ) - # 붙여넣기 - out_memory = full_img.copy() - out_memory[y1 : y2 + 1, x1 : x2 + 1] = ( - full_img[y1 : y2 + 1, x1 : x2 + 1] * mask_box["origin"] - + img * mask_box["crop"] - ) - return out_memory - - return compose_one - - -# template video 의 frame 과 model inference 결과를 합성하는 함수를 리턴한다. -# params -# ratio : 템플릿 scale 비율. 
-# 1.0: 템플릿 크기 그대로 -# 0.5: width, height 를 절반으로 줄인 크기 -def get_compose_func_without_keying(template, ratio, verbose=False): - if "move" in template.config.keys() and template.config.move: - return get_compose_func_without_keying_move( - template=template, ratio=ratio, verbose=verbose - ) - if "compose" in template.config.keys() and template.config.compose == "face_only": - return get_compose_func_without_keying_face_only( - template=template, ratio=ratio, verbose=verbose - ) - - return get_compose_func_without_keying_default( - template=template, ratio=ratio, verbose=verbose - ) - - -def compose_direct(box, model_args, ratio, model_out, full_img): - x1, y1, x2, y2 = box - mask_box = get_box_mask(x2 - x1 + 1, y2 - y1 + 1, config=model_args) - img_size = model_args.img_size - - img = resize_adapt(model_out, (x1, y1, x2, y2)) - if y2 - y1 + 1 != img.shape[0] or x2 - x1 + 1 != img.shape[1]: - raise Exception( - f"not matched compose shape. x2-x1+1: {x2 - x1 + 1}, y2-y1+1:{y2 - y1 + 1}, img: {img.shape[1]}, {img.shape[0]}" - ) - - # 붙여넣기 - out_memory = full_img.copy() - out_memory[y1 : y2 + 1, x1 : x2 + 1] = ( - full_img[y1 : y2 + 1, x1 : x2 + 1] * mask_box["origin"] + img * mask_box["crop"] - ) - return out_memory - - -def keying_direct(model_args, pred, box=None): - cromakey_func = get_cromakey_func(model_args) - compose_func = get_compose_mask_func(model_args) - - model_out, mask, alpha = pred["pred"], pred["mask"], pred["img_gt_with_alpha"] - - if pred["filename"].endswith("_no.jpg") or pred["filename"].endswith("_no.png"): - return alpha[:, :, [2, 1, 0, 3]] - - if ( - alpha.shape[0] != mask.shape[0] - or alpha.shape[1] != mask.shape[1] - or alpha.shape[0] != model_out.shape[0] - or alpha.shape[1] != model_out.shape[1] - or alpha.shape[2] != 4 - ): - raise Exception( - f"not matched keying shape. 
" - f"alpha: {alpha.shape[0]}, {alpha.shape[1]}, {alpha.shape[2]}, " - f"mask: {mask.shape[0]}, {mask.shape[1]}, " - f"model_out: {model_out.shape[0]}, {model_out.shape[1]}" - ) - - if box is not None: - model_h = model_out.shape[0] - box_h = box[3] - box[1] - if box_h > model_h: - model_out = resize_adapt(model_out, box) - mask = resize_adapt(mask, box) - alpha = resize_adapt(alpha, box) - model_out = cromakey_func(model_out) - model_out = compose_func(model_out=model_out, org_image_with_alpha=alpha, mask=mask) - - return model_out diff --git a/stf/stf-api-alternative/src/stf_alternative/dataset.py b/stf/stf-api-alternative/src/stf_alternative/dataset.py deleted file mode 100644 index 1aa291d6fc7b17c0693b5cb93aed37424306b014..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/dataset.py +++ /dev/null @@ -1,244 +0,0 @@ -import os.path -import random -from glob import glob -from pathlib import Path - -import cv2 -import numpy as np -import pandas as pd -from torch.utils.data import Dataset - -from stf_alternative.s2f_dir.src.mask_history import calc_poly - - -def frame_id(fname): - return int(os.path.basename(fname).split("_")[0]) - - -def masking(im, pts): - im = cv2.fillPoly(im, [pts], (128, 128, 128)) - return im - - -accepted_format = set([".webp", ".png", ".jpg"]) - - -class LipGanImage(Dataset): - def __init__(self, args, path, num_skip_frames=0): - self.args = args - paths = sorted( - [it for it in glob(f"{path}/*") if Path(it).suffix in accepted_format] - ) - self.paths = paths[num_skip_frames:] + paths[:num_skip_frames] - - self.mask_ver = ( - list(args.mask_ver) - if isinstance(args.mask_ver, (list, tuple)) - else [args.mask_ver] - ) - self.keying_mask_ver = ( - args.keying_mask_ver if "keying_mask_ver" in args else None - ) - self.smoothing_mask = True if args.smoothing_mask else False - self.num_ips = args.num_ips - - df = pd.read_pickle(path / "df_fan.pickle") - self.df = df.set_index("frame_idx")["cropped_pts2d"] - - def __getitem__(self, idx): - img_name = Path(self.paths[idx]) - gt_fname = img_name.name - dir_name = img_name.parent - - sidx = frame_id(gt_fname) - img_gt = cv2.imread(str(img_name), cv2.IMREAD_UNCHANGED) - - masked = img_gt[:, :, :3].copy() - img_ip = masked * 2.0 / 255.0 - 1.0 - - if self.df[sidx] is None: - # snow : 인사하는 템플릿이 들어오면서 preds 가 없는 경우가 생겼다. - # 이런 경우, 마스크 없이 원래 이미지를 그대로 준다. 
- mask = np.zeros_like(masked, dtype=np.uint8) - else: - mask_ver = random.choice(self.mask_ver) - pts = calc_poly[mask_ver](self.df[sidx], masked.shape[0], randomness=False) - if self.keying_mask_ver is not None: - keying_pts = calc_poly[self.keying_mask_ver]( - self.df[sidx], masked.shape[0], randomness=False - ) - else: - keying_pts = pts - - if self.smoothing_mask: - pts = smoothing_mask(pts) - masked = masking(masked, pts) - mask = np.zeros_like(masked, dtype=np.uint8) - mask = masking(mask, keying_pts) - - img_ips = [img_ip for _ in range(self.num_ips)] - ips = np.concatenate([masked * 2.0 / 255.0 - 1.0] + img_ips, axis=2) - - if img_gt.shape[2] == 3: - alpha = np.zeros_like(img_gt[:, :, :1]) - alpha.fill(255) - img_gt = np.concatenate([img_gt, alpha], axis=2) - - return { - "ips": ips.astype(np.float32), - "mask": mask, - "img_gt_with_alpha": img_gt, - "filename": str(img_name), - } - - def __len__(self): - return len(self.paths) - - -class LipGanRemoteImage(Dataset): - def __init__(self, args, path, num_skip_frames=0): - self.args = args - paths = sorted( - [it for it in glob(f"{path}/*") if Path(it).suffix in accepted_format] - ) - self.paths = paths[num_skip_frames:] + paths[:num_skip_frames] - self.num_skip_frames = num_skip_frames - - self.mask_ver = ( - list(args.mask_ver) - if isinstance(args.mask_ver, (list, tuple)) - else [args.mask_ver] - ) - self.keying_mask_ver = ( - args.keying_mask_ver if "keying_mask_ver" in args else None - ) - self.smoothing_mask = True if args.smoothing_mask else False - self.num_ips = args.num_ips - - df = pd.read_pickle(path / "df_fan.pickle") - self.df = df.set_index("frame_idx")["cropped_pts2d"] - - def __getitem__(self, idx): - img_name = Path(self.paths[idx]) - gt_fname = img_name.name - sidx = frame_id(gt_fname) - img_gt = cv2.imread(str(img_name), cv2.IMREAD_UNCHANGED) - - masked = img_gt[:, :, :3].copy() - img_ip = img_gt[:, :, :3].copy() - - if self.df[sidx] is None: - mask = np.zeros_like(masked, dtype=np.uint8) - else: - mask_ver = random.choice(self.mask_ver) - pts = calc_poly[mask_ver](self.df[sidx], masked.shape[0], randomness=False) - if self.keying_mask_ver is not None: - keying_pts = calc_poly[self.keying_mask_ver]( - self.df[sidx], masked.shape[0], randomness=False - ) - else: - keying_pts = pts - - if self.smoothing_mask: - pts = smoothing_mask(pts) - masked = masking(masked, pts) - mask = np.zeros_like(masked, dtype=np.uint8) - mask = masking(mask, keying_pts) - - img_ips = [img_ip for _ in range(self.num_ips)] - ips = np.concatenate([masked] + img_ips, axis=2) - - if img_gt.shape[2] == 3: - alpha = np.zeros_like(img_gt[:, :, :1]) - alpha.fill(255) - img_gt = np.concatenate([img_gt, alpha], axis=2) - - return { - "ips": ips.transpose(2, 0, 1), - "mask": mask, - "img_gt_with_alpha": img_gt, - "filename": str(img_name), - } - - def __len__(self): - return len(self.paths) - - -def get_processed_audio_segment(center_frame_id, processed_wav, fps, sample_rate): - time_center = center_frame_id / fps - - center_idx = int(time_center * sample_rate) - center_idx = center_idx // 320 - start_idx = center_idx - 39 - - new_logits = processed_wav.copy() - if start_idx < 0: - new_logits = np.pad( - new_logits, ((-start_idx, 0), (0, 0)), mode="constant", constant_values=0 - ) - start_idx = 0 - - end_idx = start_idx + 39 * 2 - if len(new_logits) < end_idx: - new_logits = np.pad( - new_logits, - ((0, end_idx - len(new_logits)), (0, 0)), - mode="constant", - constant_values=0, - ) - - return new_logits[start_idx:end_idx, :] - - -def 
zero_wav_mels_when_silent_center( - mels, mel_ps, zero_mels, zero=-4, t_secs=0.25, verbose=False -): - if t_secs is None: - return mels - - t_size = t_secs * mel_ps - _, t_axis = mels.shape - if t_size >= t_axis: - # 원하는 구간이 원래 보고 있는 구간보다 크다면 그대로 준다. - return mels - - t_size_half = int(t_size * 0.5) - if verbose: - print(f"t_axis:{t_axis}, t_size_half: {t_size_half}") - t_axis_s, t_axis_e = int(t_axis / 2) - t_size_half, int(t_axis / 2) + t_size_half - t_axis_s, t_axis_e = max(t_axis_s, 0), min(t_axis_e, t_axis) - if (mels[:, t_axis_s:t_axis_e] == -4).all(): - return zero_mels - - return mels - - -class LipGanAudio(Dataset): - def __init__(self, args, id_list, mel, fps): - if args.model_type in ("stf_v1", "stf_v2"): - raise "Did not support version < stf_v3" - - self.id_list = id_list - self.mel = mel - self.fps = fps - - self.silent_secs = ( - None if "silent_secs" not in args.keys() else args["silent_secs"] - ) - self.zero_mels = np.full((96, args.mel_step_size), -4, dtype=np.float32) - self.mel_ps = args.mel_ps - - def __getitem__(self, idx): - mel = get_processed_audio_segment(self.id_list[idx], self.mel, self.fps, 16000) - mel = zero_wav_mels_when_silent_center( - mels=mel, - mel_ps=self.mel_ps, - zero_mels=self.zero_mels, - t_secs=self.silent_secs, - ) - return { - "mel": mel, - } - - def __len__(self): - return len(self.id_list) diff --git a/stf/stf-api-alternative/src/stf_alternative/inference.py b/stf/stf-api-alternative/src/stf_alternative/inference.py deleted file mode 100644 index 2c497302132a140326cce2e381885101537d23ce..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/inference.py +++ /dev/null @@ -1,270 +0,0 @@ -import asyncio -from itertools import chain - -import numpy as np -import torch -from pydub import AudioSegment, silence - - -def check_split_lengths(silent_ranges, len_audio): - prev_end = 0 - for idx, (start, end) in enumerate(silent_ranges): - if idx < len(silent_ranges) - 1: - if silent_ranges[idx + 1][0] - start > 70000: - return False - else: - if len_audio - start > 70000: - return False - return True - - -def load_and_split_audio_by_silence( - audio_segment, - silence_thresh: int = -75, - min_silence_len: int = 500, - min_chunk_length_ms: int = 40, - seek_step: int = 100, - verbose: bool = False, -): - audio_segment = audio_segment.set_channels(1) - audio_segment = audio_segment.set_frame_rate(16000) - - for st in range(silence_thresh, -50, 5): - for msl in range(min_silence_len, 0, -100): - silent_ranges = silence.detect_silence( - audio_segment, msl, st, seek_step=seek_step - ) - length_ok = check_split_lengths(silent_ranges, len(audio_segment)) - if length_ok: - break - - if len(silent_ranges) > 0 and length_ok: - break - - if ( - len(silent_ranges) == 0 - and len(audio_segment) < 70000 - and len(audio_segment) >= 40 - ): - return [audio_segment] - - assert ( - length_ok and len(silent_ranges) > 0 - ), "Each sentence must be within 70 seconds, including silence" - - audio_chunks = [] - prev_end = 0 - - for idx, (start, end) in enumerate(silent_ranges): - if idx < len(silent_ranges) - 1: - chunk_length = silent_ranges[idx + 1][0] - prev_end - silence_length = end - prev_end - chunk_length_samples = ( - chunk_length * 16 - ) # Convert ms to samples (16000 samples/sec) - - if idx == 0: - target_length_samples = (chunk_length_samples // 320 + 1) * 320 + 80 - else: - target_length_samples = (chunk_length_samples // 320 + 1) * 320 - - target_length = target_length_samples // 16 # Convert samples back to ms - - 
adjusted_end = prev_end + target_length - else: - silence_length = ( - silent_ranges[-1][1] - prev_end - if silent_ranges[-1][1] != len(audio_segment) - else 0 - ) - adjusted_end = len(audio_segment) - - silence_length_split = max(0, (silence_length - 300)) # ms - if silence_length_split <= 0: - silence_chunk = None - chunk = audio_segment[prev_end if idx == 0 else prev_end - 5 : adjusted_end] - else: - silence_length_samples = ( - silence_length_split * 16 - ) # Convert ms to samples (16000 samples/sec) - - if idx == 0: - target_length_samples = (silence_length_samples // 320 + 1) * 320 + 80 - else: - target_length_samples = (silence_length_samples // 320 + 1) * 320 - - silence_length_split = ( - target_length_samples // 16 - ) # Convert samples back to ms - - silence_chunk = audio_segment[ - prev_end if idx == 0 else prev_end - 5 : prev_end + silence_length_split - ] - chunk = audio_segment[prev_end + silence_length_split - 5 : adjusted_end] - - if len(chunk) >= min_chunk_length_ms: - if silence_chunk is not None: - audio_chunks.append(silence_chunk) - audio_chunks.append(chunk) - else: - if audio_chunks: - if silence_chunk is not None: - audio_chunks[-1] += silence_chunk - audio_chunks[-1] += chunk - - prev_end = adjusted_end - - return audio_chunks - - -def process_audio_chunks( - audio_processor, audio_encoder, audio_chunks: list[AudioSegment], device -): - features_list = [] - for audio_chunk in audio_chunks: - features = process_audio_chunk( - audio_processor, audio_encoder, audio_chunk, device - ) - features_list.append(features) - return features_list - - -def process_audio_chunk(audio_processor, audio_encoder, audio_chunk, device): - audio_data = np.array(audio_chunk.get_array_of_samples(), dtype=np.float32) - audio_data /= np.iinfo( - np.int8 - if audio_chunk.sample_width == 1 - else np.int16 - if audio_chunk.sample_width == 2 - else np.int32 - ).max - - input_values = audio_processor( - audio_data, sampling_rate=16000, return_tensors="pt" - ).to(device)["input_values"] - - with torch.no_grad(): - logits = audio_encoder(input_values=input_values) - - return logits.last_hidden_state[0] - - -def audio_encode(model, audio_segment, device): - audio_chunks = load_and_split_audio_by_silence(audio_segment) - - features_list = process_audio_chunks( - model.audio_processor, model.audio_encoder, audio_chunks, device - ) - concatenated_features = torch.cat(features_list, dim=0) - - return concatenated_features.detach().cpu().numpy() - - -def dictzip(*iterators): - try: - while True: - yield dict(chain(*[next(iterator).items() for iterator in iterators])) - except StopIteration as e: - pass - - -async def adictzip(*aiterators): - try: - while True: - yield dict( - chain(*[(await anext(aiterator)).items() for aiterator in aiterators]) - ) - except StopAsyncIteration as e: - pass - - -def to_img(t): - t = t.permute(0, 2, 3, 1) - img = ((t / 2.0) + 0.5) * 255.0 - img = torch.clip(img, 0.0, 255.0).type(torch.uint8) - img = img.cpu().numpy() - img = img[:, :, :, [2, 1, 0]] - return img - - -def inference_model(model, v, device, verbose=False): - with torch.no_grad(): - mel, ips, mask, alpha = ( - v["mel"], - v["ips"], - v["mask"], - v["img_gt_with_alpha"], - ) - cpu_ips = ips - cpu_alpha = alpha - - audio = mel.to(device) - ips = ips.to(device).permute(0, 3, 1, 2) - - pred = model.model(ips, audio) - - gen_face = to_img(pred) - - return [ - { - "pred": o, - "mask": mask[j].numpy(), - "ips": cpu_ips[j].numpy(), - "img_gt_with_alpha": cpu_alpha[j].numpy(), - "filename": v["filename"][j], - } - for 
j, o in enumerate(gen_face) - ] - - -def inference_model_remote(model, v, device, verbose=False): - ips, mel = v["ips"], v["mel"] - try: - pred = model.model( - ips=ips, - mel=mel, - ) - return postprocess_result(pred, v) - except Exception as e: - return [None] * len(v["filename"]) - - -def postprocess_result(pred, v): - pred = pred.cpu().numpy() - pred = pred.transpose(0, 2, 3, 1) - pred = pred[:, :, :, [2, 1, 0]] - return [ - { - "pred": o, - "mask": v["mask"][j].numpy(), - "img_gt_with_alpha": v["img_gt_with_alpha"][j].numpy(), - "filename": v["filename"][j], - } - for j, o in enumerate(pred) - ] - - -async def ainference_model_remote(pool, model, v, device, verbose=False): - ips, mel = v["ips"], v["mel"] - try: - pred = await model.model( - ips=ips, - mel=mel, - ) - - loop = asyncio.get_running_loop() - return await loop.run_in_executor(pool, postprocess_result, pred, v) - except Exception as e: - return [None] * len(v["filename"]) - - -def get_head_box(df, move=False, head_box_idx=0, template_ratio=1.0): - # sz = df['cropped_size'].values[0] - # 원래 4k 템플릿에서 축소된 비율만큼 cropped_box 크기를 줄여준다. - if move: - x1, y1, x2, y2 = np.array(df["cropped_box"][head_box_idx]) - else: - x1, y1, x2, y2 = np.round( - np.array(df["cropped_box"].values[0]) * template_ratio - ).astype(np.uint8) - return x1, y1, x2, y2 diff --git a/stf/stf-api-alternative/src/stf_alternative/model.py b/stf/stf-api-alternative/src/stf_alternative/model.py deleted file mode 100644 index 08266cb00ba40533a40ef66acf7b8dd7173a173a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/model.py +++ /dev/null @@ -1,156 +0,0 @@ -import errno -import gc -import os -import sys - -import torch - -# from .s2f_dir.src.speech_encoder.WavLM import WavLM, WavLMConfig -from transformers import Wav2Vec2FeatureExtractor, WavLMModel - -from .s2f_dir.src import autoencoder as ae -from .util import * - -g_fix_seed = False -g_audio_processor = None -g_audio_encoder = None - - -class ModelInfo: - def __init__( - self, - model, - audio_processor, - audio_encoder, - args, - device, - work_root_path, - config_path, - checkpoint_path, - verbose=False, - ): - self.model = model - self.audio_processor = audio_processor - self.audio_encoder = audio_encoder - self.args = args - self.device = device - # snow : 아래는 debuging 을 위해 저장해 두는 것 - self.work_root_path = work_root_path - self.config_path = config_path - self.checkpoint_path = checkpoint_path - self.verbose = verbose - - def __del__(self): - if self.verbose: - print("del model , gc:", sys.getrefcount(self.model)) - del self.model - if self.args.model_type == "stf_v3": - del self.audio_encoder - del self.audio_processor - - -def __init_fix_seed(random_seed, verbose=False): - global g_fix_seed - if g_fix_seed == True: - return - - if verbose: - print("fix seed") - fix_seed(random_seed) - g_fix_seed = True - - -def create_model( - config_path, checkpoint_path, work_root_path, device, verbose=False, wavlm_path=None -): - __init_fix_seed(random_seed=1234, verbose=verbose) - global g_audio_encoder - global g_audio_processor - if verbose: - print("load model") - - if not os.path.exists(config_path): - raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), config_path) - - args = read_config(config_path) - if args.model_type and args.model_type == "remote": - return ModelInfo( - model=None, - audio_processor=None, - audio_encoder=None, - args=args, - device=device, - work_root_path=work_root_path, - config_path=config_path, - checkpoint_path=checkpoint_path, - 
verbose=verbose, - ) - - if not os.path.exists(checkpoint_path): - raise FileNotFoundError( - errno.ENOENT, os.strerror(errno.ENOENT), checkpoint_path - ) - - if args.model_type: - model = ae.Speech2Face( - 3, - (3, args.img_size, args.img_size), - (1, 96, args.mel_step_size), - args.model_type, - ) - else: - model = ae.Speech2Face( - 3, (3, args.img_size, args.img_size), (1, 96, args.mel_step_size), "stf_v1" - ) - - if len(args.model_type) == 0: # snow: 나중에 생긴 설정이어서 이 항목이 없을 수가 있다. - args.model_type = "stf_v1" - - if args.model_type == "stf_v3": - if g_audio_encoder == None: - if wavlm_path is None: - wavlm_path = f"{Path(__file__).parent.parent}/hf_wavlm" - - if verbose: - print(f"@@@@@@@@@@@@@@@@@@ {wavlm_path}") - g_audio_processor = Wav2Vec2FeatureExtractor.from_pretrained(wavlm_path) - g_audio_encoder = WavLMModel.from_pretrained(wavlm_path) - - checkpoint = torch.load(checkpoint_path, map_location="cpu") - if "state_dict" in checkpoint: - model.load_state_dict(checkpoint["state_dict"]) - else: - model.load_state_dict(checkpoint) - if device == "cuda" and torch.cuda.device_count() > 1: - gpus = list(range(torch.cuda.device_count())) - print("Multi GPU activate, gpus : ", gpus) - model = torch.nn.DataParallel(model, device_ids=gpus) - model.to(device) - model.eval() - - if args.model_type == "stf_v3": - g_audio_encoder = torch.nn.DataParallel(g_audio_encoder, device_ids=gpus) - g_audio_encoder.to(device) - g_audio_encoder.eval() - else: - model.to(device).eval() - if args.model_type == "stf_v3": - g_audio_encoder.to(device).eval() - - model_data = ModelInfo( - model=model, - audio_processor=g_audio_processor, - audio_encoder=g_audio_encoder, - args=args, - device=device, - work_root_path=work_root_path, - config_path=config_path, - checkpoint_path=checkpoint_path, - verbose=verbose, - ) - del checkpoint - gc.collect() - if verbose: - print("load model complete") - - return model_data diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess.py b/stf/stf-api-alternative/src/stf_alternative/preprocess.py deleted file mode 100644 index aa23d5d0a5336755e7f1a6d197f07b84faf8c04c..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/preprocess.py +++ /dev/null @@ -1,352 +0,0 @@ -import gc -import os -from pathlib import Path - -import cv2 -from PIL import Image -from tqdm import tqdm - -from .preprocess_dir.utils import crop_with_fan as cwf -from .preprocess_dir.utils import face_finder as ff -from .util import * - - -# template video 전처리 -# preprocess_template_old(기존함수) 와 기능은 동일하고, 메모리 사용량 줄임 -def preprocess_template( - config_path, - template_video_path, - reference_face, - work_root_path, - device, - template_frame_ratio=1.0, - template_video_ratio=[1.0], - callback=None, - verbose=False, - save_frames=True, - silent_video_path=None, - no_infer_frames=[], -): - """template video 전처리 - - Parameters - ---------- - config_path (str) : 설정파일 경로 - template_video_path (str) : 템플릿 영상 경로 - reference_face : (str) : 참고할 얼굴 이미지 경로 - work_root_path (str) : 작업폴더 경로. 전처리 정보가 저장됨. - device (str) : device 정보. ex) cuda:0 - template_frame_ratio (float) : 템플릿 비디오 resize 비율. 1.0: 영상 그대로 사용 - template_video_ratio (list[float]) : 템플릿 비디오 resize 비율. 1.0: 영상 그대로 사용 - save_frames (bool) : 템플릿 비디오 프레임 저장여부 - no_infer_frames (list[tuple[int,int]]) : 추론에 사용되지 않는frame 구간. 시작은 포함, 끝은 포함되지 않음. 
- """ - load_gpu = False - - config = read_config(config_path) - - image_size = config.img_size - - callback1 = callback_inter( - callback, min_per=0, max_per=2, desc="preprocess_template 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=2, max_per=20, desc="preprocess_template 2", verbose=verbose - ) - callback3 = callback_inter( - callback, min_per=20, max_per=100, desc="preprocess_template 3", verbose=verbose - ) - - preprocess_dir = get_preprocess_dir(work_root_path, config.name) - Path(preprocess_dir).mkdir(exist_ok=True, parents=True) - # snow : for debug - if verbose: - print("preprocess_dir: ", preprocess_dir, ", work_root_path:", work_root_path) - - # 전처리 파일 경로 - crop_mp4 = get_crop_mp4_dir(preprocess_dir, template_video_path) - - if not Path(crop_mp4).exists(): - load_gpu = True - - ff.init_face_finder(device) - cwf.init_fan(device) - - if verbose: - print("템플릿 비디오 처리 ... ") - - # 아나운서 얼굴 정보를 구한다. - df_face, imgs = ff.find_face(reference_face) - callback1(100) # 진행율을 알려준다. - - g_anchor_ebd = df_face["ebd"].values[0] - # 템플릿 동영상에서 아나운서 얼굴 위치만 저장해 놓는다 - df_paths = ff.save_face_info3( - template_video_path, - g_anchor_ebd, - config.move, - base=preprocess_dir, - callback=callback2, - verbose=verbose, - ) - - ### 얼굴 영역을 FAN 랜드마크 기반으로 크롭해 놓는다 - assert len(df_paths) == 1 - if config.move: - if verbose: - print("cwf.save_crop_info_move --") - df_fan_path = cwf.save_crop_info_move( - image_size=image_size, - anchor_box_path=df_paths[0], - mp4_path=template_video_path, - out_dir=crop_mp4, - crop_offset_y=config.crop_offset_y, - crop_margin=config.crop_margin, - callback=callback3, - verbose=verbose, - ) - else: - if verbose: - print("cwf.save_crop_info2 --") - df_fan_path = cwf.save_crop_info2( - image_size=image_size, - anchor_box_path=df_paths[0], - mp4_path=template_video_path, - out_dir=crop_mp4, - crop_offset_y=config.crop_offset_y, - crop_margin=config.crop_margin, - no_infer_frames=no_infer_frames, - callback=callback3, - verbose=verbose, - ) - # snow : for debug - if verbose: - print("df_fan_path: ", df_fan_path) - ff.del_face_finder() - cwf.del_fan() - else: - if verbose: - print("전처리가 이미 되어있음") - callback3(100) - - # 1. save frames for stf - if save_frames: - frame_dir = get_frame_dir( - preprocess_dir, template_video_path, ratio=template_frame_ratio - ) - if verbose: - print("frame_dir:", frame_dir) - save_template_frames( - template_video_path=template_video_path, - template_frames_path=frame_dir, - ratio=template_frame_ratio, - save_in_video=False, - verbose=verbose, - ) - if silent_video_path is not None: - frame_dir = get_frame_dir( - preprocess_dir, silent_video_path, ratio=template_frame_ratio - ) - save_template_frames( - template_video_path=silent_video_path, - template_frames_path=frame_dir, - ratio=template_frame_ratio, - save_in_video=False, - verbose=verbose, - ) - - if template_video_path.endswith(".mov"): - # TODO snow : 성능 확인 필요. - # 지금은 mov 인 경우만 파일을 저장한다. 추론할 때 느려서 라고 한다. by hojin - - # 2. 
save video for encoding - for video_ratio in template_video_ratio: - if video_ratio != 1.0: - out_path = get_template_ratio_file_path( - preprocess_dir, template_video_path, ratio=video_ratio - ) - save_template_frames( - template_video_path=template_video_path, - template_frames_path="", - template_video_path_with_ratio=out_path, - ratio=video_ratio, - save_in_video=True, - verbose=verbose, - ) - if silent_video_path is not None: - out_path = get_template_ratio_file_path( - preprocess_dir, silent_video_path, ratio=video_ratio - ) - save_template_webm_ratio( - template_video_path=silent_video_path, - ratio=video_ratio, - out_path=out_path, - verbose=verbose, - ) - - if template_video_path.endswith(".webm"): - # TODO snow : 성능 확인 필요. ratio 개수만큼 webm 을 만든다. - for video_ratio in template_video_ratio: - out_path = get_template_ratio_file_path( - preprocess_dir, template_video_path, ratio=video_ratio - ) - save_template_webm_ratio( - template_video_path=template_video_path, - ratio=video_ratio, - out_path=out_path, - verbose=verbose, - ) - if silent_video_path is not None: - out_path = get_template_ratio_file_path( - preprocess_dir, silent_video_path, ratio=video_ratio - ) - save_template_webm_ratio( - template_video_path=silent_video_path, - ratio=video_ratio, - out_path=out_path, - verbose=verbose, - ) - - gc.collect() - return load_gpu - - -# snow: webm 템플릿을 ratio 별로 resize 하여 저장하는 함수 -def save_template_webm_ratio(template_video_path, ratio, out_path, verbose): - def resize_(size, img): - w, h = size - img = cv2.resize(img, (w, h), inter_alg_(w, h, img)) - return img - - def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - os.makedirs(os.path.dirname(out_path), exist_ok=True) - reader, meta = get_four_channel_ffmpeg_reader(template_video_path) - if Path(out_path).exists(): - if verbose: - print(f"ratio 파일이 저장되어 있음, {out_path}") - return - - if verbose: - print(f"webm ratio template, org:{template_video_path}, ratio:{ratio}") - size_org = meta["size"] - size = list(int(round(ratio * v)) // 2 * 2 for v in size_org) - writer = get_webm_ffmpeg_writer( - out_path, size=size, fps=meta["fps"], wav_path=template_video_path - ) - writer.send(None) # seed the generator - - total_cnt, _ = imageio_ffmpeg.count_frames_and_secs(template_video_path) - for idx, f in tqdm( - enumerate(reader), total=total_cnt, desc=f"save webm ratio:{ratio}, size:{size}" - ): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(size_org[1], size_org[0], 4) - f = resize_(size, f) - writer.send(f) # seed the generator - writer.close() - - -# hojin -# png frame 추출 + crop -def save_template_frames( - template_video_path, - template_frames_path, - template_video_path_with_ratio=None, - ratio=1.0, - save_in_video=False, - verbose=False, -): - def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - def resize_(size, img): - w, h = size - img = cv2.resize(img, (w, h), inter_alg_(w, h, img)) - return img - - # hojin: 템플릿을 프레임별로 저장해두기 -> write_video_in_thread에서 reader 사용하지 않기 위함 - if save_in_video == False: - if Path(template_frames_path).exists(): - if verbose: - print("프레임이 모두 저장되어 있음") - return - else: - if Path(template_video_path_with_ratio).exists(): - if verbose: - print("비디오가 생성되어 있음") - return - os.makedirs(os.path.dirname(template_video_path_with_ratio), exist_ok=True) - - if template_video_path.endswith(".mov") or template_video_path.endswith(".webm"): - reader, meta = 
get_four_channel_ffmpeg_reader(template_video_path) - else: # mp4 - reader, meta = get_three_channel_ffmpeg_reader(template_video_path) - size_org = meta["size"] - size = list(int(round(ratio * v)) // 2 * 2 for v in size_org) - fps = meta["fps"] - if verbose: - print(meta) - - total_cnt, _ = imageio_ffmpeg.count_frames_and_secs(template_video_path) - - if save_in_video is False: - Path(template_frames_path).mkdir(exist_ok=True, parents=True) - - # hojin: 추출한 프레임을 내보내기를 위해서 다시 mov로 만들어놓기 (ratio<1.0) - writer = None - - if verbose: - print("template_frames_path: ", template_frames_path) - for idx, f in tqdm( - enumerate(reader), - total=total_cnt, - desc=f"save frames f{ratio}" - if save_in_video is False - else f"save video f{ratio}", - ): - name = f"""{idx:05d}.webp""" - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape( - size_org[1], size_org[0], 3 if template_video_path.endswith(".mp4") else 4 - ) - f = resize_(size, f) - if save_in_video is False: - f = np.ascontiguousarray(f) - f = Image.fromarray( - f, mode="RGB" if template_video_path.endswith(".mp4") else "RGBA" - ) - f.save( - str(Path(template_frames_path) / str(name)), format="png", lossless=True - ) - # cv2.imwrite(str(Path(template_frames_path) / str(name)), f[:, :, [2, 1, 0, 3]], [int(cv2.IMWRITE_PNG_COMPRESSION), 3]) - - if writer is None and save_in_video is True: - if ratio != 1.0: - writer = imageio_ffmpeg.write_frames( - template_video_path_with_ratio, - size=size, - fps=fps, - quality=10, - pix_fmt_in="rgba", - pix_fmt_out="rgba", - codec="png", - macro_block_size=1, - ) - writer.send(None) - - if writer: - writer.send(f) - - if writer: - writer.close() - - -# hojin end diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/__init__.py b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/__pycache__/__init__.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 8eb8a0397c10fd668ccabc6a8b310e22175936ce..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/crop_with_fan-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/crop_with_fan-checkpoint.py deleted file mode 100644 index eb9ea5dd761fb0fb25879a9f48fdcc4f2fc9cc4f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/crop_with_fan-checkpoint.py +++ /dev/null @@ -1,1081 +0,0 @@ -import math -import os -import pdb -from glob import glob -from pathlib import Path - -import cv2 -import face_alignment -import imageio -import imageio_ffmpeg -import numpy as np -import pandas as pd -import torch -from moviepy.editor import AudioFileClip, ImageSequenceClip -from scipy import stats -from tqdm.auto import tqdm - -from stf_alternative.util import ( - callback_inter, - get_four_channel_ffmpeg_reader, - get_three_channel_ffmpeg_reader, -) - -from . 
import face_finder as ff - -g_detector_fan = None -g_detector_fan3d = None - - -def init_fan(device="cuda:0"): - global g_detector_fan - global g_detector_fan3d - if g_detector_fan is None: - try: - g_detector_fan = face_alignment.FaceAlignment( - face_alignment.LandmarksType._2D, flip_input=False, device=device - ) - except AttributeError: - g_detector_fan = face_alignment.FaceAlignment( - face_alignment.LandmarksType.TWO_D, flip_input=False, device=device - ) - if g_detector_fan3d is None: - try: - g_detector_fan3d = face_alignment.FaceAlignment( - face_alignment.LandmarksType._3D, flip_input=False, device=device - ) - except AttributeError: - g_detector_fan3d = face_alignment.FaceAlignment( - face_alignment.LandmarksType.THREE_D, flip_input=False, device=device - ) - - -def del_fan(): - global g_detector_fan - global g_detector_fan3d - if g_detector_fan is not None: - del g_detector_fan - g_detector_fan = None - - if g_detector_fan3d is None: - del g_detector_fan3d - g_detector_fan3d = None - torch.cuda.empty_cache() - - -def fan_box(pred, img, type3d): - if type3d: - xlist, ylist, _ = zip(*pred) - else: - xlist, ylist = zip(*pred) - xlist = [int(round(x)) for x in xlist] - ylist = [int(round(x)) for x in ylist] - y1, y2, x1, x2 = [min(ylist), max(ylist), min(xlist), max(xlist)] - size = max(y2 - y1 + 1, x2 - x1 + 1) - size = int(round(size)) - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - x1, y1 = int(round(cx - size / 2)), int(round(cy - size / 2)) - x2, y2 = x1 + size - 1, y1 + size - 1 - - y1 = max(0, y1) - y2 = min(img.shape[0], y2) - x1 = max(0, x1) - x2 = min(img.shape[1], x2) - return (x1, y1, x2, y2) - - -def face_detect_fan_(img, type3d): - global g_detector_fan - global g_detector_fan3d - - # snow : init_fan 을 미리 불러주지 않았으면 여기서 불리도록한다. - init_fan() - - if type3d: - preds = g_detector_fan3d.get_landmarks(img) - else: - preds = g_detector_fan.get_landmarks(img) - - preds = [(fan_box(p, img, type3d), p) for p in preds] - preds = [((b[2] - b[0]) * (b[3] - b[1]), b, p) for b, p in preds] - preds = sorted(preds) - area, (x1, y1, x2, y2), pred = preds[-1] - return np.round((pred)).astype(np.int32), np.array([x1, y1, x2, y2]) - - -def face_detect_fan(img, type3d=False): - # snow : 인사하는 템플릿이 나와서 실제 얼굴이 없는 영역이 나옴에 따라 얼굴없는 경우 처리를 다시 넣어준다. - try: - return face_detect_fan_(img, type3d) - except: - return None, None - - -def get_anchor_box(df_anchor, offset_y, margin, size_stride=32, verbose=False): - # 면적 평균을 구하고 너무(?) 작거나 큰 얼굴은 제거 - # desc = df_anchor['area'].describe() - # area_25, area_75 = desc['25%'], desc['75%'] - # df_anchor = df_anchor.query('@area_25 < area and area < @area_75') - - # z score로 아웃라이어 제거하고 평균 박스 구하기 - boxes = np.array([v for v in df_anchor["box"].values.tolist() if v is not None]) - center_xs = boxes[:, [0, 2]].mean(axis=1) - center_ys = boxes[:, [1, 3]].mean(axis=1) - size_xs = boxes[:, 2] - boxes[:, 0] - size_ys = boxes[:, 3] - boxes[:, 1] - - ####################################### - # 박스가 하나 뿐이면 죽는 문제 수정. 
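The averaging in get_anchor_box below is a robust mean: per-coordinate z-score filtering drops outlier detections, with NaN z-scores (constant values) and single-box inputs handled separately. A compact sketch of the same idea, assuming scipy is available; robust_mean and mean_anchor_box are hypothetical helper names.

    # Illustrative sketch only: robust-mean a set of face boxes by dropping
    # |z| >= 3 outliers per coordinate, keeping NaN z-scores (constant input)
    # and falling back to a plain mean when there is only one box.
    import math
    import numpy as np
    from scipy import stats

    def robust_mean(values):
        values = np.asarray(values, dtype=np.float64)
        if len(values) < 2:
            return float(values.mean())
        z = stats.zscore(values)
        kept = [v for v, zv in zip(values, z) if abs(zv) < 3 or math.isnan(zv)]
        return float(np.mean(kept))

    def mean_anchor_box(boxes):
        boxes = np.asarray(boxes, dtype=np.float64)   # (N, 4) as x1, y1, x2, y2
        cx = robust_mean(boxes[:, [0, 2]].mean(axis=1))
        cy = robust_mean(boxes[:, [1, 3]].mean(axis=1))
        w = robust_mean(boxes[:, 2] - boxes[:, 0])
        h = robust_mean(boxes[:, 3] - boxes[:, 1])
        x1, y1 = int(round(cx - w / 2)), int(round(cy - h / 2))
        return [x1, y1, x1 + int(round(w)) - 1, y1 + int(round(h)) - 1]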
- # xs 혹은 ys 의 값이 모두 같은 값이어서 z 가 nan이되서 죽는 문제 수정 - if len(center_xs) > 1: - center_x = ( - np.mean( - [ - x - for z, x in zip(stats.zscore(center_xs), center_xs) - if abs(z) < 3 or math.isnan(z) - ] - ) - .round() - .astype(np.int32) - ) - else: - center_x = np.mean(center_xs).round().astype(np.int32) - if len(center_ys) > 1: - center_y = np.mean( - [ - y - for z, y in zip(stats.zscore(center_ys), center_ys) - if abs(z) < 3 or math.isnan(z) - ] - ) - else: - center_y = np.mean(center_ys).round().astype(np.int32) - center_y = int(round(center_y * (1 + offset_y))) - if len(size_xs) > 1: - size_x = ( - np.mean( - [ - x - for z, x in zip(stats.zscore(size_xs), size_xs) - if abs(z) < 3 or math.isnan(z) - ] - ) - .round() - .astype(np.int32) - ) - else: - size_x = np.mean(size_xs).round().astype(np.int32) - if len(size_ys) > 1: - size_y = ( - np.mean( - [ - y - for z, y in zip(stats.zscore(size_ys), size_ys) - if abs(z) < 3 or math.isnan(z) - ] - ) - .round() - .astype(np.int32) - ) - else: - size_y = np.mean(size_ys).round().astype(np.int32) - - # center_x = np.mean([x for z, x in zip(stats.zscore(center_xs), center_xs) if abs(z) < 3]).round().astype(np.int32) - # center_y = np.mean([y for z, y in zip(stats.zscore(center_ys), center_ys) if abs(z) < 3]) - # center_y = int(round(center_y*(1+offset_y))) - # size_x = np.mean([x for z, x in zip(stats.zscore(size_xs), size_xs) if abs(z) < 3]).round().astype(np.int32) - # size_y = np.mean([y for z, y in zip(stats.zscore(size_ys), size_ys) if abs(z) < 3]).round().astype(np.int32) - ####################################### - SS = size_stride - size_step_x = int(math.ceil((size_x * (1 + margin)) / SS) * SS) - size_step_y = int(math.ceil((size_y * (1 + margin)) / SS) * SS) - - x1 = center_x - int(size_step_x * 0.5) - y1 = center_y - int(size_step_y * 0.5) - - y1 = max(0, y1) - - mean_box = [x1, y1, x1 + size_step_x - 1, y1 + size_step_y - 1] - if verbose: - print("mean_box:", mean_box, " width:", size_step_x, " height:", size_step_y) - return mean_box - - -def df_fan_info(frames, box, verbose=False): - x1, y1, x2, y2 = box - - def fan_info(f): - face = f[y1 : y2 + 2, x1 : x2 + 1] - pts2d, box = face_detect_fan(face) - # pts3d, _ = face_detect_fan(face, type3d=True) - pts3d = None - return box, pts2d, pts3d - - def to_full(box, pts2d, pts3d, x1y1): - if box is not None: - box = (box.reshape(-1, 2) + x1y1).reshape(-1) - if pts2d is not None: - pts2d = pts2d + x1y1 - if pts3d is not None: - pts3d = pts3d + (x1y1 + (0,)) - return box, pts2d, pts3d - - fi = [ - fan_info(frames[idx]) - for idx in tqdm(frames, desc="■ fan ", disable=not verbose) - ] - fi = [to_full(*info, (x1, y1)) for info in fi] - - df = pd.DataFrame(fi, columns=["box", "pts2d", "pts3d"]) - df["frame_idx"] = list(frames.keys()) - return df - - -def crop(frames, df_fan, offset_y, margin): - df_fan = df_fan.copy() - - # ToDo: None을 제거해야 됨. 
crash 발생 - pts2ds = [e for e in df_fan["pts2d"].values if e is not None] - if len(pts2ds): - pts2ds = np.stack(pts2ds) - x1, y1 = pts2ds[:, :, 0].min(), pts2ds[:, :, 1].min() - x2, y2 = pts2ds[:, :, 0].max(), pts2ds[:, :, 1].max() - else: - return None, None - - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - sx, sy = (x2 - x1 + 1) * (1 + margin), (y2 - y1 + 1) * (1 + margin) - x1, y1 = cx - sx / 2, cy - sy / 2 - x2, y2 = cx + sx / 2, cy + sy / 2 - - size = x2 - x1 + 1 - offset_y = int(round(size * offset_y)) - y1 = y1 + offset_y - y2 = y1 + size - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - # print((x1, y1, x2, y2), ((x2-x1+1), (y2-y1+1))) - - # TODO snow: 박스가 이미지를 넘어서는 경우에 대한 방어코드 - # 방어코드를 넣긴했는데, 이렇게 되면 얼굴이 찌그러져서 학습이된다. - # 추후 고민해봐야 한다. - frame_shape = frames[0].shape - x1, y1, x2, y2 = ( - max(0, x1), - max(0, y1), - min(x2, frame_shape[1] - 1), - min(y2, frame_shape[0] - 1), - ) - - cropped_frames = {} - cropped_pts2ds = [] - frame_idxs_ = [] - for _, pts2d, _, frame_idx in df_fan.values: - f = frames[frame_idx] - if pts2d is not None: - cropped_pts2ds.append(pts2d - (x1, y1)) - else: - cropped_pts2ds.append(None) - frame_idxs_.append(frame_idx) - cropped_frames[frame_idx] = f[y1 : y2 + 1, x1 : x2 + 1].copy() - df_fan["cropped_pts2d"] = cropped_pts2ds - df_fan["cropped_box"] = [np.array([x1, y1, x2, y2])] * len(df_fan) - df_fan["cropped_size"] = size - return df_fan, cropped_frames - - -def save_debug_audio(mp4_path, min_idx, max_idx, audio_path): - ac = AudioFileClip(mp4_path) - meta = ff.video_meta(mp4_path) - s, e = min_idx / meta["nframes"], (max_idx + 1) / meta["nframes"] - s, e = s * meta["duration"], e * meta["duration"] - ac = ac.subclip(s, e) - ac.write_audiofile(audio_path, logger=None) - - -def save_audio(mp4_path, audio_path): - ac = AudioFileClip(mp4_path) - ac.write_audiofile(audio_path, logger=None) - - -# snow : 사용하지 않는 코드 일단 주석처리 -# def save_crop_info(anchor_box_path, mp4_path, out_dir, make_mp4=False, -# crop_offset_y = -0.1, crop_margin=0.4, verbose=False): -# df_anchor_i = pd.read_pickle(anchor_box_path) -# -# # 얼굴이 모두 들어가는 박스 크기를 구한다. -# # 여기서 구한 박스에서만 fan 이 얼굴과 피처 포인트를 구한다. -# box = get_anchor_box(df_anchor_i, offset_y=0, margin=1.0) -# -# min_idx, max_idx = df_anchor_i['frame_idx'].values[[0, -1]] -# -# clip_dir = Path(out_dir)/Path(anchor_box_path).stem -# Path(clip_dir).mkdir(exist_ok=True, parents=True) -# -# try: -# save_audio(mp4_path, f'{clip_dir}/audio.wav') -# save_debug_audio(mp4_path, min_idx, max_idx, f'{clip_dir}/audio_debug.wav') -# except: -# # inference 때는 음성 없는 비디오가 들어온다. -# pass -# -# pickle_path = f'{clip_dir}/df_fan.pickle' -# if Path(pickle_path).exists(): -# return pickle_path -# -# frames = ff.extract_frame(mp4_path, min_idx, max_idx+1) -# -# # FAN 이 얼굴과 피처 포인트를 구한다. -# df = df_fan_info(frames, box, verbose=verbose) -# -# # 모델에 입력할 박스를 다시 구해서 crop 한다. -# # crop 박스 영역은 피쳐 포인트 기반으로 구한다. 
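A sketch of the crop-box derivation used by crop()/crop_and_save: take the landmark extents over the clip, pad them by the margin, make the height follow the padded width, and shift vertically by offset_y; clamping to the frame comes later. The defaults mirror crop_offset_y=-0.1 and crop_margin=0.4, and landmark_crop_box is a hypothetical helper name.

    # Illustrative sketch only: crop box from landmark extents, before clamping.
    import numpy as np

    def landmark_crop_box(all_pts2d, margin=0.4, offset_y=-0.1):
        # all_pts2d: (num_frames, 68, 2) stacked landmarks over the clip
        pts = np.asarray(all_pts2d, dtype=np.float64)
        x1, y1 = pts[:, :, 0].min(), pts[:, :, 1].min()
        x2, y2 = pts[:, :, 0].max(), pts[:, :, 1].max()

        # grow the box symmetrically by the margin
        cx, cy = (x1 + x2) / 2, (y1 + y2) / 2
        sx, sy = (x2 - x1 + 1) * (1 + margin), (y2 - y1 + 1) * (1 + margin)
        x1, x2 = cx - sx / 2, cx + sx / 2
        y1, y2 = cy - sy / 2, cy + sy / 2

        # make the height follow the (padded) width and apply the vertical offset
        size = x2 - x1 + 1
        y1 = y1 + int(round(size * offset_y))
        y2 = y1 + size
        return np.round([x1, y1, x2, y2]).astype(np.int32)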
-# df, cropped_frames = crop(frames, df, -# offset_y=crop_offset_y, -# margin=crop_margin) -# if df is None: -# return None -# -# for (idx1, pts), (idx2, im) in zip( -# df[['frame_idx','cropped_pts2d']].values, -# cropped_frames.items()): -# assert idx1 == idx2 -# name = f"""{idx1:05d}_{'yes' if pts is not None else 'no'}.jpg""" -# cv2.imwrite(str(Path(clip_dir)/str(name)), im[:,:,[2,1,0]], [int(cv2.IMWRITE_JPEG_QUALITY), 100]) -# df.to_pickle(pickle_path) -# with open(pickle_path.replace('.pickle', '.txt'), 'w') as f: -# f.write('success') -# -# if make_mp4: -# meta = ff.video_meta(mp4_path) -# debug_clip_path = save_debug_clip(clip_dir, meta['fps']) -# print('saved debug_mp4:', debug_clip_path ) -# -# return pickle_path - - -def save_debug_clip(clip, fps): - jpgs = glob(f"{clip}/*.png") - jpgs = sorted([(int(Path(e).stem.split("_")[0]), imageio.imread(e)) for e in jpgs]) - - fan_pts = pd.read_pickle(Path(clip) / "df_fan.pickle") - fan_pts = fan_pts.set_index("frame_idx")["cropped_pts2d"] - - def draw_pts(im, pts): - im = im.copy() - if pts is not None: - for x, y in pts: - cv2.circle(im, (x, y), radius=1, color=(0, 255, 0)) - return im - - marked = [draw_pts(im, fan_pts[idx]) for idx, im in jpgs] - merged = [np.concatenate([im, m], axis=1) for (idx, im), m in zip(jpgs, marked)] - - sz = merged[0].shape[0] - pw = (sz + 1) // 2 * 2 - sz - merged = [ - np.pad(im, ((0, pw), (0, 0), (0, 0)), mode="constant", constant_values=128) - for im in merged - ] - - audio_clip = AudioFileClip(f"{clip}/audio_debug.wav") - - clip_debug = ImageSequenceClip(merged, fps) - - clip_debug = clip_debug.set_audio(audio_clip) - - save_path = f"{clip}/debug.mp4" - clip_debug.write_videofile(save_path, logger=None) - return save_path - - -def inter_alg(target_size, img): - if isinstance(target_size, tuple): - w, h = target_size - else: - w, h = target_size, target_size - return inter_alg_(w, h, img) - - -def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - -def crop_and_save( - image_size, path, df_fan, offset_y, margin, clip_dir, callback, verbose=False -): - df_fan = df_fan.copy() - - # ToDo: None을 제거해야 됨. crash 발생 - pts2ds = [e for e in df_fan["pts2d"].values if e is not None] - if len(pts2ds): - pts2ds = np.stack(pts2ds) - x1, y1 = pts2ds[:, :, 0].min(), pts2ds[:, :, 1].min() - x2, y2 = pts2ds[:, :, 0].max(), pts2ds[:, :, 1].max() - else: - return None, None - - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - sx, sy = (x2 - x1 + 1) * (1 + margin), (y2 - y1 + 1) * (1 + margin) - x1, y1 = cx - sx / 2, cy - sy / 2 - x2, y2 = cx + sx / 2, cy + sy / 2 - - size = x2 - x1 + 1 - offset_y = int(round(size * offset_y)) - y1 = y1 + offset_y - y2 = y1 + size - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - # print((x1, y1, x2, y2), ((x2-x1+1), (y2-y1+1))) - if path.endswith(".mov") or path.endswith(".webm"): - reader, meta = get_four_channel_ffmpeg_reader(path) - channel_size = 4 - else: # mp4 - reader, meta = get_three_channel_ffmpeg_reader(path) - channel_size = 3 - frame_size = meta["size"] - - # TODO snow: 박스가 이미지를 넘어서는 경우에 대한 방어코드 - # 방어코드를 넣긴했는데, 이렇게 되면 얼굴이 찌그러져서 학습이된다. - # 추후 고민해봐야 한다. 
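The follow-up step, sketched separately: clamp the box to the frame and remap landmarks into the resized image_size crop with independent x/y scale factors, as the crop_and_save loop does. clamp_box and remap_landmarks are hypothetical helper names for illustration.

    # Illustrative sketch only: clamp a crop box to the frame and remap landmarks
    # into the resized (image_size x image_size) crop.
    import numpy as np

    def clamp_box(box, frame_w, frame_h):
        x1, y1, x2, y2 = box
        return max(0, x1), max(0, y1), min(x2, frame_w - 1), min(y2, frame_h - 1)

    def remap_landmarks(pts2d, box, image_size):
        # pts2d: (68, 2) landmarks in full-frame coordinates
        x1, y1, x2, y2 = box
        pts = np.asarray(pts2d, dtype=np.float64) - (x1, y1)   # into crop coordinates
        pts[:, 0] *= image_size / (x2 - x1 + 1)                 # scale x
        pts[:, 1] *= image_size / (y2 - y1 + 1)                 # scale y
        return pts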
- x1, y1, x2, y2 = ( - max(0, x1), - max(0, y1), - min(x2, frame_size[0] - 1), - min(y2, frame_size[1] - 1), - ) - - cropped_pts2ds = [] - for pts2d, frame_idx, f in tqdm( - zip(df_fan["pts2d"].values, df_fan["frame_idx"].values, reader), - total=len(df_fan), - desc="crop_and_save", - disable=not verbose, - ): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(frame_size[1], frame_size[0], channel_size) - if pts2d is not None: - pts2d_resized = pts2d - (x1, y1) - if image_size is not None: - # Updating the scale for x and y with the new image size - scale_x = image_size / (x2 - x1 + 1) - scale_y = image_size / (y2 - y1 + 1) - pts2d_resized[:, 0] = ( - pts2d_resized[:, 0] * scale_x - ) # Scale x-coordinate - pts2d_resized[:, 1] = ( - pts2d_resized[:, 1] * scale_y - ) # Scale y-coordinate - cropped_pts2ds.append(pts2d_resized) - else: - cropped_pts2ds.append(None) - - cropped_frame = f[y1 : y2 + 1, x1 : x2 + 1].copy() - - h, w = cropped_frame.shape[:2] - if image_size is not None: - cropped_frame = resize_adapt(image_size, cropped_frame) - - if channel_size == 3: - name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.jpg""" - cv2.imwrite( - str(Path(clip_dir) / str(name)), - cropped_frame[:, :, [2, 1, 0]], - [int(cv2.IMWRITE_JPEG_QUALITY), 100], - ) - else: - name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.png""" - cv2.imwrite( - str(Path(clip_dir) / str(name)), - cropped_frame[:, :, [2, 1, 0, 3]], - [int(cv2.IMWRITE_PNG_COMPRESSION), 0], - ) - callback((frame_idx + 1) / len(df_fan) * 100) - - df_fan["cropped_pts2d"] = cropped_pts2ds - df_fan["cropped_box"] = [np.array([x1, y1, x2, y2])] * len(df_fan) - df_fan["cropped_size"] = size - return df_fan - - -# df_fan_info 와 기능은 동일하고, 메모리 사용량만 줄임 -def df_fan_info2(path, box, callback=None, verbose=False): - callback1 = callback_inter( - callback, min_per=0, max_per=90, desc="df_fan_info2 - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=90, max_per=100, desc="df_fan_info2 - 2", verbose=verbose - ) - - x1, y1, x2, y2 = box - - def fan_info(f): - face = f[y1 : y2 + 2, x1 : x2 + 1] - pts2d, box = face_detect_fan(face) - # pts3d, _ = face_detect_fan(face, type3d=True) - pts3d = None - return box, pts2d, pts3d - - def to_full(box, pts2d, pts3d, x1y1): - if box is not None: - box = (box.reshape(-1, 2) + x1y1).reshape(-1) - if pts2d is not None: - pts2d = pts2d + x1y1 - if pts3d is not None: - pts3d = pts3d + (x1y1 + (0,)) - return box, pts2d, pts3d - - def __fan_info(f, size, idx, max_idx): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(size[1], size[0], 3) - # 진행상황 공유 - callback1((idx + 1) / max_idx * 100) - return fan_info(f) - - reader = imageio_ffmpeg.read_frames(str(path)) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - size = meta["size"] - - frame_cnt, _ = imageio_ffmpeg.count_frames_and_secs(str(path)) - fi = { - idx: __fan_info(frame, size, idx=idx, max_idx=frame_cnt) - for idx, frame in tqdm( - enumerate(reader), total=frame_cnt, desc="■ fan ", disable=not verbose - ) - } - fi = {idx: to_full(*info, (x1, y1)) for idx, info in fi.items()} - - df = pd.DataFrame(fi.values(), columns=["box", "pts2d", "pts3d"]) - df["frame_idx"] = list(fi.keys()) - callback2(100) - return df - - -def set_no_infer(df_anchor, frame_ranges, column_name): - """ - 주어진 데이터프레임의 특정 열 값을 frame_ranges 내의 인덱스에 대해 None으로 설정합니다. - - Parameters: - df_anchor (pandas.DataFrame): 입력 데이터프레임. 'frame_idx' 열이 포함되어야 합니다. - frame_ranges (list of tuple): 시작 인덱스와 종료 인덱스를 포함하는 튜플의 리스트. 
- 종료 인덱스는 범위에 포함되지 않습니다. - column_name (str): 업데이트할 열의 이름. 기본값은 'box'. - - Returns: - pandas.DataFrame: 특정 열 값이 업데이트된 데이터프레임 - """ - - if frame_ranges is None: - return df_anchor - - df_anchor = df_anchor.set_index("frame_idx") - - for s, e in frame_ranges: - # 인덱스 범위를 검사하고 필요한 경우 조정 - s = max(s, df_anchor.index.min()) - e = min(e, df_anchor.index.max() + 1) - - # 조정된 범위 내에 있는 인덱스만 선택하여 'box' 열 값을 None으로 설정 - df_anchor.loc[s : e - 1, column_name] = None - - df_anchor = df_anchor.reset_index() - return df_anchor - - -# save_crop_info 와 기능은 동일하고, 메모리 사용량을 줄인 것 -def save_crop_info2( - image_size, - anchor_box_path, - mp4_path, - out_dir, - make_mp4=False, - crop_offset_y=-0.1, - crop_margin=0.4, - no_infer_frames=None, - callback=None, - verbose=False, -): # , is_webm=False): - callback1 = callback_inter( - callback, min_per=0, max_per=5, desc="save_crop_info2 - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=5, max_per=70, desc="save_crop_info2 - 2", verbose=verbose - ) - callback3 = callback_inter( - callback, min_per=70, max_per=100, desc="save_crop_info2 - 3", verbose=verbose - ) - - df_anchor_i = pd.read_pickle(anchor_box_path) - df_anchor_i = set_no_infer(df_anchor_i, no_infer_frames, "box") - - # 얼굴이 모두 들어가는 박스 크기를 구한다. - # 여기서 구한 박스에서만 fan 이 얼굴과 피처 포인트를 구한다. - box = get_anchor_box(df_anchor_i, offset_y=0, margin=1.0) - - min_idx, max_idx = df_anchor_i["frame_idx"].values[[0, -1]] - - clip_dir = Path(out_dir) / Path(anchor_box_path).stem - Path(clip_dir).mkdir(exist_ok=True, parents=True) - - try: - save_audio(mp4_path, f"{clip_dir}/audio.wav") - save_debug_audio(mp4_path, min_idx, max_idx, f"{clip_dir}/audio_debug.wav") - except: - # inference 때는 음성 없는 비디오가 들어온다. - pass - - pickle_path = f"{clip_dir}/df_fan.pickle" - if Path(pickle_path).exists(): - return pickle_path - - callback1(100) - - # FAN 이 얼굴과 피처 포인트를 구한다. - df = df_fan_info2(mp4_path, box, callback2, verbose=verbose) - df = set_no_infer(df, no_infer_frames, "pts2d") - - # 모델에 입력할 박스를 다시 구해서 crop 한다. - # crop 박스 영역은 피쳐 포인트 기반으로 구한다. - df = crop_and_save( - image_size, - mp4_path, - df, - offset_y=crop_offset_y, - margin=crop_margin, - clip_dir=clip_dir, - callback=callback3, - verbose=verbose, - ) - if df is None: - return None - df.to_pickle(pickle_path) - - with open(pickle_path.replace(".pickle", ".txt"), "w") as f: - f.write("success") - - if make_mp4: - meta = ff.video_meta(mp4_path) - debug_clip_path = save_debug_clip(clip_dir, meta["fps"]) - print("saved debug_mp4:", debug_clip_path) - - return pickle_path - - -# snow : 사용하지 않는 코드 일단 주석처리 -# # save_crop_info2 와 차이점 : 이미지를 resize해서 저장한다. -# def save_crop_info3(anchor_box_path, mp4_path, out_dir, img_size, make_mp4=False, -# crop_offset_y = -0.1, crop_margin=0.4, callback=None, verbose=False): -# -# callback1 = callback_inter(callback, min_per=0, max_per=5, desc='save_crop_info2 - 1', verbose=verbose) -# callback2 = callback_inter(callback, min_per=5, max_per=70, desc='save_crop_info2 - 2', verbose=verbose) -# callback3 = callback_inter(callback, min_per=70, max_per=100, desc='save_crop_info2 - 3', verbose=verbose) -# -# df_anchor_i = pd.read_pickle(anchor_box_path) -# -# # 얼굴이 모두 들어가는 박스 크기를 구한다. -# # 여기서 구한 박스에서만 fan 이 얼굴과 피처 포인트를 구한다. 
-# box = get_anchor_box(df_anchor_i, offset_y=0, margin=1.0) -# -# min_idx, max_idx = df_anchor_i['frame_idx'].values[[0, -1]] -# -# clip_dir = Path(out_dir)/Path(anchor_box_path).stem -# Path(clip_dir).mkdir(exist_ok=True, parents=True) -# -# try: -# save_audio(mp4_path, f'{clip_dir}/audio.wav') -# save_debug_audio(mp4_path, min_idx, max_idx, f'{clip_dir}/audio_debug.wav') -# except: -# # inference 때는 음성 없는 비디오가 들어온다. -# pass -# -# pickle_path = f'{clip_dir}/df_fan.pickle' -# if Path(pickle_path).exists(): -# return pickle_path -# -# callback1(100) -# -# # FAN 이 얼굴과 피처 포인트를 구한다. -# df = df_fan_info2(mp4_path, box, callback2, verbose=verbose) -# -# # 모델에 입력할 박스를 다시 구해서 crop 한다. -# # crop 박스 영역은 피쳐 포인트 기반으로 구한다. -# df = crop_and_save(mp4_path, df, -# offset_y=crop_offset_y, -# margin=crop_margin, -# clip_dir=clip_dir, -# callback=callback3, -# verbose=verbose) -# -# if df is None: -# return None -# -# resize_for_model(img_size, clip_dir, verbose=verbose) -# -# df.to_pickle(pickle_path) -# with open(pickle_path.replace('.pickle', '.txt'), 'w') as f: -# f.write('success') -# -# if make_mp4: -# meta = ff.video_meta(mp4_path) -# debug_clip_path = save_debug_clip(clip_dir, meta['fps']) -# print('saved debug_mp4:', debug_clip_path ) -# -# return pickle_path - - -def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - -def inter_alg(target_size, img): - if isinstance(target_size, tuple): - w, h = target_size - else: - w, h = target_size, target_size - return inter_alg_(w, h, img) - - -def resize_adapt(sz, img): - h, w, channel_size = img.shape - if h == sz and w == sz: - return img - board = np.full((sz, sz, channel_size), 128, np.uint8) - if True: - # if sz < max(h, w): - r = sz / max(h, w) - h, w = int(round(r * h)), int(round(r * w)) - img = cv2.resize(img, (w, h), inter_alg(sz, img)) - board[(sz - h) // 2 : (sz - h) // 2 + h, (sz - w) // 2 : (sz - w) // 2 + w] = img - return board - - -def read_pickle_preds(dir_name): - df = pd.read_pickle(dir_name / "df_fan.pickle") - preds = df.set_index("frame_idx")["cropped_pts2d"] - # g_cached_pickle[str(dir_name)] = preds - return preds - - -def masking(im, pts): - h, w = im.shape[:2] - im = cv2.fillPoly(im, [pts], (128, 128, 128)) - return im - - -# img_size : (w,h) -def resize_for_model(img_size, clip_dir, verbose=False): - assert type(img_size) == int - fs = glob(str(clip_dir) + "/*.jpg") - if verbose: - print("resize to:", img_size) - print("image len:", len(fs)) - print(str(clip_dir)) - - d = os.path.dirname(fs[0]) - resize_d = f"{d}.resized" - if verbose: - print(resize_d) - os.makedirs(resize_d, exist_ok=True) - for f in tqdm(fs, desc="■ resize ", disable=not verbose): - img = cv2.imread(str(f)) - img = resize_adapt(img_size, img) - f = os.path.basename(f) - cv2.imwrite(f"{resize_d}/{f}", img, [int(cv2.IMWRITE_JPEG_QUALITY), 100]) - - -def compute_max_size(df_fan, margin=0.9): - max_size = 0 - for box in df_fan["box"]: - if box is not None: - x1, y1, x2, y2 = box - box_width, box_height = x2 - x1 + 1, y2 - y1 + 1 - size = max(box_width, box_height) - size_with_margin = size * (1 + margin) - max_size = max(max_size, size_with_margin) - return max_size - - -def get_anchor_boxes_move(path, df_anchor_i, offset_y, margin): - reader = imageio_ffmpeg.read_frames(str(path)) - meta = reader.__next__() # meta data, e.g. 
meta["size"] -> (width, height) - frame_size = meta["size"] - - boxes = [] - max_size = compute_max_size(df_anchor_i, margin) - - for i, b in enumerate(df_anchor_i["box"]): - x1, y1, x2, y2 = b - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - - cy += offset_y * max_size # Shift the box downwards by offset_y * max_size - x1, y1 = cx - max_size / 2, cy - max_size / 2 - x2, y2 = cx + max_size / 2, cy + max_size / 2 - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - width = x2 - x1 - height = y2 - y1 - - if width > height: - y2 = y1 + width - elif height > width: - x2 = x1 + height - - x1, y1, x2, y2 = ( - max(0, x1), - max(0, y1), - min(x2, frame_size[0]), - min(y2, frame_size[1]), - ) - boxes.append([x1, y1, x2, y2]) - return boxes - - -def crop_and_save_move( - image_size, path, df_fan, offset_y, margin, clip_dir, callback, verbose=False -): - df_fan = df_fan.copy() - max_size = compute_max_size(df_fan, margin) - reader = imageio_ffmpeg.read_frames(str(path)) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - - if path.endswith(".mov") or path.endswith(".webm"): - reader, meta = get_four_channel_ffmpeg_reader(path) - channel_size = 4 - else: # mp4 - reader, meta = get_three_channel_ffmpeg_reader(path) - channel_size = 3 - frame_size = meta["size"] - - for b in df_fan["box"]: - try: - x1, y1, x2, y2 = b - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - - cy += offset_y * max_size # Shift the box downwards by offset_y * max_size - x1, y1 = cx - max_size / 2, cy - max_size / 2 - x2, y2 = cx + max_size / 2, cy + max_size / 2 - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - width = x2 - x1 - height = y2 - y1 - - if width > height: - y2 = y1 + width - elif height > width: - x2 = x1 + height - break - except: - pass - cropped_boxes, cropped_pts2ds, cropped_size = [], [], [] - - for (box, pts2d, _, frame_idx), f in tqdm( - zip(df_fan.values, reader), - total=len(df_fan), - desc="crop_and_save_move", - disable=not verbose, - ): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(frame_size[1], frame_size[0], channel_size) - - if box is not None: - x1, y1, x2, y2 = box - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - - cy += offset_y * max_size # Shift the box downwards by offset_y * max_size - x1, y1 = cx - max_size / 2, cy - max_size / 2 - x2, y2 = cx + max_size / 2, cy + max_size / 2 - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - width = x2 - x1 - height = y2 - y1 - - if width > height: - y2 = y1 + width - elif height > width: - x2 = x1 + height - - cropped_boxes.append([x1, y1, x2, y2]) - cropped_size.append([x2 - x1 + 1]) - if pts2d is not None: - pts2d_resized = pts2d - (x1, y1) - if image_size is not None: - # Updating the scale for x and y with the new image size - # scale = image_size / max_size # Use the same scale for both x and y - # pts2d_resized = pts2d_resized * scale - scale_x = image_size / (x2 - x1 + 1) - scale_y = image_size / (y2 - y1 + 1) - pts2d_resized[:, 0] = ( - pts2d_resized[:, 0] * scale_x - ) # Scale x-coordinate - pts2d_resized[:, 1] = ( - pts2d_resized[:, 1] * scale_y - ) # Scale y-coordinate - cropped_pts2ds.append(pts2d_resized) - else: - cropped_pts2ds.append(None) - - else: - cropped_size.append(None) - cropped_boxes.append(None) - cropped_pts2ds.append(None) - - cropped_frame = f[y1 : y2 + 1, x1 : x2 + 1].copy() - - if image_size is not None: - cropped_frame = resize_adapt(image_size, cropped_frame) - - # name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.jpg""" - # 
cv2.imwrite(str(Path(clip_dir)/str(name)), cropped_frame[:,:,[2,1,0]], [int(cv2.IMWRITE_JPEG_QUALITY), 100]) - if channel_size == 3: - name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.jpg""" - cv2.imwrite( - str(Path(clip_dir) / str(name)), - cropped_frame[:, :, [2, 1, 0]], - [int(cv2.IMWRITE_JPEG_QUALITY), 100], - ) - else: - name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.png""" - cv2.imwrite( - str(Path(clip_dir) / str(name)), - cropped_frame[:, :, [2, 1, 0, 3]], - [int(cv2.IMWRITE_PNG_COMPRESSION), 0], - ) - - callback((frame_idx + 1) / len(df_fan) * 100) - - df_fan["cropped_pts2d"] = cropped_pts2ds - df_fan["cropped_box"] = cropped_boxes - df_fan["cropped_size"] = cropped_size - - return df_fan - - -def df_fan_info_move(path, boxes, callback=None, verbose=False): - callback1 = callback_inter( - callback, min_per=0, max_per=90, desc="df_fan_info_move - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=90, max_per=100, desc="df_fan_info_move - 2", verbose=verbose - ) - - def fan_info(f, box): - x1, y1, x2, y2 = box - face = f[y1 : y2 + 2, x1 : x2 + 1] - pts2d, box = face_detect_fan(face) - pts3d = None - return box, pts2d, pts3d - - def to_full(box, pts2d, pts3d, x1y1): - if box is not None: - box = (box.reshape(-1, 2) + x1y1).reshape(-1) - if pts2d is not None: - pts2d = pts2d + x1y1 - if pts3d is not None: - pts3d = pts3d + (x1y1 + (0,)) - return box, pts2d, pts3d - - def __fan_info(f, size, idx, max_idx): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(size[1], size[0], 3) - box = boxes[idx] - callback1((idx + 1) / max_idx * 100) - # Run FAN on the cropped face - return fan_info(f, box) - - reader = imageio_ffmpeg.read_frames(str(path)) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - size = meta["size"] - - frame_cnt, _ = imageio_ffmpeg.count_frames_and_secs(str(path)) - fi = { - idx: __fan_info(frame, size, idx=idx, max_idx=frame_cnt) - for idx, frame in tqdm( - enumerate(reader), total=frame_cnt, desc="■ fan ", disable=not verbose - ) - } - fi = {idx: to_full(*info, boxes[idx][:2]) for idx, info in fi.items()} - - df = pd.DataFrame(fi.values(), columns=["box", "pts2d", "pts3d"]) - df["frame_idx"] = list(fi.keys()) - callback2(100) - return df - - -def save_crop_info_move( - anchor_box_path, - mp4_path, - out_dir, - image_size, - make_mp4=False, - crop_offset_y=-0.1, - crop_margin=0.4, - callback=None, - verbose=False, -): - callback1 = callback_inter( - callback, min_per=0, max_per=5, desc="save_crop_info_move - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=5, max_per=70, desc="save_crop_info_move - 2", verbose=verbose - ) - callback3 = callback_inter( - callback, - min_per=70, - max_per=100, - desc="save_crop_info_move - 3", - verbose=verbose, - ) - - df_anchor_i = pd.read_pickle(anchor_box_path) - boxes = get_anchor_boxes_move(mp4_path, df_anchor_i, offset_y=0.0, margin=1.5) - - clip_dir = Path(out_dir) / Path(anchor_box_path).stem - Path(clip_dir).mkdir(exist_ok=True, parents=True) - - pickle_path = f"{clip_dir}/df_fan.pickle" - if Path(pickle_path).exists(): - return pickle_path - - callback1(100) - # FAN 이 얼굴과 피처 포인트를 구한다. - df = df_fan_info_move(mp4_path, boxes, callback2, verbose=verbose) - # 모델에 입력할 박스를 다시 구해서 crop 한다. - # crop 박스 영역은 피쳐 포인트 기반으로 구한다. 
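`crop_and_save_move` above shifts each frame's landmarks into the crop's coordinate system, rescales them to the model input resolution, and writes RGB crops as JPEG but RGBA crops as lossless PNG so the alpha channel survives. A hedged sketch of those two steps; `rescale_landmarks` and `write_crop` are illustrative helper names and the numbers are made up:

```python
import numpy as np
import cv2

def rescale_landmarks(pts2d, crop_box, image_size):
    """Shift landmarks into crop coordinates, then scale to an image_size x image_size crop."""
    x1, y1, x2, y2 = crop_box
    pts = pts2d.astype(np.float32) - (x1, y1)
    pts[:, 0] *= image_size / (x2 - x1 + 1)   # scale x into the resized crop
    pts[:, 1] *= image_size / (y2 - y1 + 1)   # scale y into the resized crop
    return pts

def write_crop(path_stem, crop, has_pts):
    """JPEG for RGB crops, lossless PNG for RGBA crops, mirroring the deleted code."""
    suffix = "yes" if has_pts else "no"
    if crop.shape[2] == 3:
        cv2.imwrite(f"{path_stem}_{suffix}.jpg",
                    crop[:, :, [2, 1, 0]],            # RGB -> BGR for OpenCV
                    [int(cv2.IMWRITE_JPEG_QUALITY), 100])
    else:
        cv2.imwrite(f"{path_stem}_{suffix}.png",
                    crop[:, :, [2, 1, 0, 3]],         # RGBA -> BGRA, keep alpha
                    [int(cv2.IMWRITE_PNG_COMPRESSION), 0])

# Illustrative values only.
pts = np.array([[120, 200], [180, 260]], dtype=np.float32)
print(rescale_landmarks(pts, crop_box=(100, 150, 299, 349), image_size=256))
```

Per-axis scale factors are used because, after rounding and clamping, the crop box is not guaranteed to be exactly square.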
- df = crop_and_save_move( - image_size, - mp4_path, - df, - offset_y=crop_offset_y, - margin=crop_margin, - clip_dir=clip_dir, - callback=callback3, - verbose=verbose, - ) - if df is None: - return None - df.to_pickle(pickle_path) - with open(pickle_path.replace(".pickle", ".txt"), "w") as f: - f.write("success") - - try: - min_idx = min(df["frame_idx"].values) - max_idx = max(df["frame_idx"].values) - save_audio(mp4_path, f"{clip_dir}/audio.wav") - save_debug_audio(mp4_path, min_idx, max_idx, f"{clip_dir}/audio_debug.wav") - except: - # inference 때는 음성 없는 비디오가 들어온다. - pass - - if make_mp4: - meta = ff.video_meta(mp4_path) - debug_clip_path = save_debug_clip(clip_dir, meta["fps"]) - print("saved debug_mp4:", debug_clip_path) - - return pickle_path diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/face_finder-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/face_finder-checkpoint.py deleted file mode 100644 index 1008a8010195dc1162851cc83cd9571a87674e9a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/face_finder-checkpoint.py +++ /dev/null @@ -1,570 +0,0 @@ -import gc -import os -from pathlib import Path - -import cv2 -import imageio -import imageio_ffmpeg -import numpy as np -import pandas as pd -import torch -import torchvision -from facenet_pytorch import MTCNN, InceptionResnetV1 -from moviepy.editor import AudioFileClip, ImageSequenceClip -from PIL import Image -from tqdm.auto import tqdm - -from stf_alternative.util import callback_inter - -g_mtcnn = None -g_recognizer = None -g_device = None - - -# 얼굴 인식 툴킷 -def init_face_finder(device="cuda:0"): - global g_mtcnn - global g_recognizer - global g_device - - if g_mtcnn is None and g_recognizer is None: - g_mtcnn = MTCNN(image_size=166, device=device) - print("load MTCNN ", "success ^ ^" if g_mtcnn is not None else "fail ㅠㅠ") - g_recognizer = InceptionResnetV1(pretrained="vggface2").eval().to(device) - print( - "load g_recognizer ", - "success ^ ^" if g_recognizer is not None else "fail ㅠㅠ", - ) - g_device = device - - -def del_face_finder(): - global g_mtcnn - global g_recognizer - global g_device - if g_mtcnn is not None: - del g_mtcnn - g_mtcnn = None - if g_recognizer is not None: - del g_recognizer - g_recognizer = None - torch.cuda.empty_cache() - - -def find_face(img): - """얼굴 위치 및 임베딩 벡터 구하기 - Arguments: - img: torch.Tensor 또는 ndarray. 분석하고자 하는 사진 - 동작: - img 사진에 등장하는 모든 얼굴을 찾아서 embedding값을 구한다 - 얼굴 영역 box와 embeddig 값을 pandas.DataFrame 형태로 변환한다 - df와 df의 정보값에 대응되는 crop 영역도 함께 리턴한다. - """ - global g_mtcnn - global g_recognizer - - # snow : init_face_finder 을 미리 불러주지 않았으면 여기서 불리도록한다. 
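`init_face_finder`/`del_face_finder` here (and `init_fan`/`del_fan` in crop_with_fan.py) follow a lazy global-singleton pattern: the heavy detector is built once on first use, reused for every frame, and torn down explicitly to release GPU memory. A minimal sketch of that pattern, with `load_model` standing in for the real MTCNN / InceptionResnetV1 / FAN construction:

```python
# Sketch of the lazy-singleton pattern used by init_face_finder() / init_fan().
_g_model = None

def load_model(device):
    # Placeholder: the real code constructs MTCNN / InceptionResnetV1 / FAN on `device`.
    return {"device": device}

def init_model(device="cuda:0"):
    global _g_model
    if _g_model is None:          # only pay the loading cost once
        _g_model = load_model(device)
    return _g_model

def del_model():
    global _g_model
    if _g_model is not None:
        del _g_model
        _g_model = None
        # The real code also calls torch.cuda.empty_cache() here to return freed VRAM.

def detect(frame):
    model = init_model()          # safe even if init_model() was never called explicitly
    return model["device"], frame

print(detect("frame-0"))
del_model()
```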
- init_face_finder() - - if isinstance(img, str): - img = imageio.imread(img) - frame = np.array(img) - df_non_face = pd.DataFrame({"box": [np.nan], "ebd": [np.nan]}) - with torch.no_grad(): - boxes = g_mtcnn.detect(frame) - if boxes[0] is None: - return df_non_face, None - boxes = boxes[0].round().astype(np.int32) - - org = np.array(frame) - - def calc_ebd(box): - x1, y1, x2, y2 = box - crop = org[y1 : y2 + 1, x1 : x2 + 1] - sz = g_mtcnn.image_size - resized = cv2.resize(crop, (sz, sz), cv2.INTER_AREA) - x = torchvision.transforms.functional.to_tensor(resized) - with torch.no_grad(): - ebd = g_recognizer(x.unsqueeze(0).to(g_device)) - return ebd[0].cpu(), crop - - def check_box(x1, y1, x2, y2): - return (0 <= x1 and 0 <= y1) and (x2 < frame.shape[1] and y2 < frame.shape[0]) - - boxes = [box.tolist() for box in boxes if check_box(*box)] - ebds = [calc_ebd(box) for box in boxes] - if len(ebds) == 0: - return df_non_face, None - ebds, face_images = list(zip(*ebds)) - df_face = pd.DataFrame({"box": list(boxes), "ebd": ebds}) - return df_face, face_images - - -class FaceFinder: - def __init__(self, device="cuda:0"): - self.mtcnn = MTCNN(image_size=166, device=device) - self.recognizer = InceptionResnetV1(pretrained="vggface2").eval().to(device) - self.device = device - self.tracker = cv2.TrackerCSRT_create() # cv2.legacy.TrackerMOSSE_create() - self.tracker_initialized = False - # self.last_successful_box = None - self.idx = 0 - - def calc_ebd(self, box, frame): - x1, y1, x2, y2 = box - crop = frame[y1 : y2 + 1, x1 : x2 + 1] - sz = self.mtcnn.image_size - resized = cv2.resize(crop, (sz, sz), cv2.INTER_AREA) - x = torchvision.transforms.functional.to_tensor(resized) - with torch.no_grad(): - ebd = self.recognizer(x.unsqueeze(0).to(g_device)) - return ebd[0].cpu() - - def find_face(self, frame): - if not self.tracker_initialized: - # Run face detection and initialize tracker - boxes = self.mtcnn.detect(frame) - if boxes[0] is not None: - box = boxes[0][0].round().astype(np.int32) - self.tracker_initialized = True - self.tracker.init( - frame, (box[0], box[1], box[2] - box[0], box[3] - box[1]) - ) - ebd = self.calc_ebd(box, frame) - return {"box": box.tolist(), "ebd": ebd.numpy()} - else: - # Update tracker and get new bounding box - (success, bbox) = self.tracker.update(frame) - if success: - x, y, w, h = [int(v) for v in bbox] - box = [x, y, x + w, y + h] - - ebd = self.calc_ebd(box, frame) - return {"box": box, "ebd": ebd.numpy()} - else: - # Tracker failed, reset initialization - self.tracker_initialized = False - - # if self.last_successful_box is not None: - boxes = self.mtcnn.detect(frame) - if boxes[0] is not None: - box = boxes[0][0].round().astype(np.int32) - ebd = self.calc_ebd(box, frame) - return {"box": box, "ebd": ebd.numpy()} - return None - - -""" 주어진 비디오에서 얼굴을 찾아 아나운서 얼굴과 유사도 구해 놓기 """ - - -# 비디오에서 추출 랜던 가능한 프레임 범위중 end 부분 알아내기 -def get_valid_end(path, end=None, stride=1): - vid = imageio.get_reader(path, "ffmpeg") - - if end is None: - end = vid.count_frames() - elif end < 0: - end = vid.count_frames() + 1 + end - - if stride == 1: - return end - - try: - vid.get_data(end - 1) - vid.close() - return end - except: - end = end - 1 - vid.close() - return get_valid_end(path, end, stride) - - -def extract_frame(path, start=0, end=-1, stride=1, verbose=False): - val_end = get_valid_end(path, end, stride) - - vid = imageio.get_reader(path, "ffmpeg") - if end < 0: - end = val_end + 1 + end - if val_end < end: - end = val_end - - frames = {} - for i in tqdm( - range(start, end, stride), 
- desc=f"extract frame stride({stride}) {Path(path).name}", - disable=not verbose, - ): - try: - f = vid.get_data(i) - except: - w, h = vid.get_meta_data()["size"] - f = np.zeros((h, w, 3), np.uint8) - frames[i] = f - - vid.close() - return frames - - -# 비디오에 나오는 얼굴 임베딩값 구하는 유틸 -def calc_ebds_from_images(frames, verbose=False): - face_infos = { - idx: find_face(frame)[0] - for idx, frame in tqdm( - frames.items(), desc="find_faces for calc_ebd", disable=not verbose - ) - } - for idx, fi in face_infos.items(): - fi["frame_idx"] = idx - return pd.concat(face_infos, ignore_index=True) - - -# 유사도 구하는 유틸 - - -# 얼굴 박스 그려서 보여주기. 다른 사람 얼굴은 붉은색, 아나운서 얼굴은 녹색 -def draw_face(df, frame): - frame = frame.copy() - - boxes = df["box"].values - if 1 < len(boxes): - for x1, y1, x2, y2 in boxes[:-1]: - frame = cv2.rectangle(frame, (x1, y1), (x2, y2), (255, 0, 0), 3) - if 0 < len(boxes): - x1, y1, x2, y2 = boxes[-1] - frame = cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 3) - return frame - - -def show_face(frame_idx, df_face_info, frames): - df = df_face_info.query("frame_idx == @frame_idx") - frame = draw_face(df, frames[frame_idx]) - display(Image.fromarray(frame)) - - -def get_filtered_face(df_face_info, sim_th=0.7): - # 아나운서 얼굴만 나오는 정사각형 영역 구하기 - tqdm.pandas() - - # 유사도 기반으로 아나운서 얼굴만 필터(실제로는 먼저 가장 유사한 얼굴만 골라내기) - df = df_face_info.groupby("frame_idx", as_index=False).apply( - lambda df: df.iloc[-1:] - ) - df = df.drop("ebd", axis=1) - df["area"] = df["box"].map(lambda x: (x[2] - x[0] + 1) * (x[3] - x[1] + 1)) - df = df.query("@sim_th <= similaraty") - return df - - -def get_face_info_(frames, ebd_아나운서, sim_th, verbose=False): - df_face_info = calc_ebds_from_images(frames, verbose=verbose) - df_face_info = df_face_info.dropna(axis=0) - - calc_sim = lambda ebd: (ebd_아나운서 * ebd).sum().item() - df_face_info["similaraty"] = df_face_info["ebd"].map(calc_sim) - df_face_info = df_face_info.sort_values(["frame_idx", "similaraty"]) - - # 유사도 기반으로 아나운서 얼굴만 필터(실제로는 먼저 가장 유사한 얼굴만 골라내기) - return frames, df_face_info, get_filtered_face(df_face_info, sim_th) - - -def get_face_info(path, ebd_아나운서, start=0, end=-1, stride=1, sim_th=0.7, verbose=False): - frames = extract_frame(path, start, end, stride, verbose=verbose) - return get_face_info_(frames, ebd_아나운서, sim_th, verbose=verbose) - - -def get_face_idxs(mp4_path, meta): - STEP_SECONDS = 1 - S = STEP_SECONDS - - pickle_path = f"df_face_info/{Path(mp4_path).stem}.pickle" - df_face_info = pd.read_pickle(pickle_path) - df_f = get_filtered_face(df_face_info, 0.7) - - idxs = sorted(df_f["frame_idx"].tolist()) - - fps = meta["fps"] - - start_idxs = [max(int(idxs[0] - S * fps + 1), 0)] - end_idxs = [] - - prev_idx = start_idxs[-1] - for idx in idxs: - if prev_idx + fps * 10 < idx: - end_idxs.append(int(prev_idx + fps * S - 1)) - start_idxs.append(int(idx - fps * S + 1)) - prev_idx = idx - end_idxs.append(get_valid_end(mp4_path)) - - return list(zip(start_idxs, end_idxs)) - - -def split(mp4_path, ebd_아나운서, start, end, audioclip, meta): - frames_i, df_face_info_i, df_f_i = get_face_info( - mp4_path, ebd_아나운서, start, end, sim_th=0.7 - ) - - idxs = df_f_i["frame_idx"] - start, end = idxs.min(), idxs.max() - - frames_i = {i: f for i, f in frames_i.items() if start <= i and i <= end} - - s, e = start / meta["nframes"], end / meta["nframes"] - - if audioclip is not None: - t = audioclip.duration - a = audioclip.subclip(t_start=t * s, t_end=t * e) - c = ImageSequenceClip(list(frames_i.values()), fps=meta["fps"]) - - c = c.set_audio(a) - else: - c = None - - return c, 
df_face_info_i, df_f_i - - -def save_splited_face_info(mp4_path, ebd_아나운서, save_clip=False): - meta = video_meta(mp4_path) - - audioclip = AudioFileClip(mp4_path) if save_clip else None - - out_paths = [] - for i, (s, e) in enumerate(get_face_idxs(mp4_path, meta)): - c = extract_frame(mp4_path, s, e) - s, e = np.array(list(c.keys()))[[0, -1]] - e += 1 - clip, df_face_info_i, df_f_i = split(mp4_path, ebd_아나운서, s, e, audioclip, meta) - # df_face_info_i.to_pickle(f'df_face_info_i/{Path(mp4_path).stem}_{i:03d}.pickle') - out_path = f"df_anchor_i/{Path(mp4_path).stem}_{i:03d}.pickle" - os.makedirs(os.path.dirname(out_path), exist_ok=True) - df_f_i.to_pickle(out_path) - out_paths.append(out_path) - if save_clip: - video_name = f"clip/{Path(mp4_path).stem}_{i:03d}.mp4" - os.makedirs(os.path.dirname(video_name), exist_ok=True) - clip.write_videofile(video_name) - return out_paths - - -def save_face_info(mp4_path, ebd_아나운서, base="./df_face_info"): - pickle_path = f"{base}/{Path(mp4_path).stem}.pickle" - - if not Path(pickle_path).exists(): - fps = video_meta(mp4_path)["fps"] - r = get_face_info(mp4_path, ebd_아나운서, 0, -1, stride=(round(fps) * 1)) - frames, df_face_info, df_아나운서_only = r - - os.makedirs(os.path.dirname(pickle_path), exist_ok=True) - df_face_info.to_pickle(pickle_path) - - return save_splited_face_info(mp4_path, ebd_아나운서) - - -def face_info_to_anchor(df, stride, val_end=None): - if val_end is None: - last_idx = df["frame_idx"].max() - val_end = last_idx - rows = [] - for idx in range(val_end + 1): - target_idx = idx // stride * stride - df_search = df.query("frame_idx == @target_idx") - assert len(df_search) > 0 - box, _, _, sim = df_search.iloc[0].values - x1, y1, x2, y2 = box - rows.append([box, idx, sim, (x2 - x1) * (y2 - y1)]) - - df_face_info = pd.DataFrame( - rows, columns=["box", "frame_idx", "sililaraty", "area"] - ) - # df_face_info.head() - return df_face_info - - -def save_face_info2(mp4_path, ebd_아나운서, base="./", verbose=False): - df_face_info_path = os.path.join( - base, "df_face_info", f"{str(Path(mp4_path).stem)}.pickle" - ) - if verbose: - print("save_face_info2 - df_face_info: ", str(df_face_info_path)) - - fps = video_meta(mp4_path)["fps"] - stride = round(fps) * 1 - - if not Path(df_face_info_path).exists(): - r = get_face_info(mp4_path, ebd_아나운서, 0, -1, stride=stride, verbose=verbose) - frames, df_face_info, df_아나운서_only = r - del frames - gc.collect() - os.makedirs(os.path.dirname(df_face_info_path), exist_ok=True) - df_face_info.to_pickle(df_face_info_path) - - dst = Path(base) / "df_anchor_i" / f"{Path(df_face_info_path).stem}_000.pickle" - if verbose: - print("df_anchor_i:", str(dst)) - if not Path(dst).exists(): - os.makedirs(os.path.dirname(dst), exist_ok=True) - df = pd.read_pickle(df_face_info_path) - df_ = df.sort_values("similaraty", ascending=False).drop_duplicates( - ["frame_idx"] - ) - df_ = df_.query("similaraty >= 0.3") - # display(df_.groupby('frame_idx').count()) - # pdb.set_trace() - df_face_info = face_info_to_anchor(df_, stride=stride, val_end=None) - df_face_info.to_pickle(dst) - return [dst] - return [dst] - - -# 메타데이터 추출 유틸 -def video_meta(file): - vid = imageio.get_reader(file, "ffmpeg") - meta = vid.get_meta_data() - meta["path"] = file - meta["nframes"] = vid.count_frames() - vid.close() - return meta - - -# 비디오에 나오는 얼굴 임베딩값 구하는 유틸 -def calc_ebds_from_images2(path, stride, callback=None, verbose=False): - if verbose: - print("calc_ebds_from_images2, ", path) - - def __find_face(f, size): - f = np.frombuffer(f, dtype=np.uint8) - f = 
f.reshape(size[1], size[0], 3) - return find_face(f)[0] - - reader = imageio_ffmpeg.read_frames(path) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - size = meta["size"] - - frame_cnt, _ = imageio_ffmpeg.count_frames_and_secs(path) - face_infos = {} - for idx, frame in tqdm( - enumerate(reader), - total=frame_cnt, - desc="find_faces for calc_ebd", - disable=not verbose, - ): - # 진행상황을 알려준다. - callback((idx + 1) / frame_cnt * 100) - - if idx % stride != 0: - continue - face_infos[idx] = __find_face(frame, size) - - for idx, fi in face_infos.items(): - fi["frame_idx"] = idx - return pd.concat(face_infos, ignore_index=True) - - -def calc_ebds_from_images_move(path, stride, callback=None, verbose=False): - if verbose: - print("calc_ebds_from_images_move, ", path) - - # Initialize the FaceFinder - face_finder = FaceFinder(device="cuda:0") - - reader = imageio_ffmpeg.read_frames(path) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - size = meta["size"] - - frame_cnt, _ = imageio_ffmpeg.count_frames_and_secs(path) - face_infos = {} - - for idx, frame in tqdm( - enumerate(reader), - total=frame_cnt, - desc="find_faces for calc_ebd", - disable=not verbose, - ): - # Indicate progress - if callback: - callback((idx + 1) / frame_cnt * 100) - - # Only process frames according to the stride - if idx % stride != 0: - continue - - # Call the new find_face method instead - frame = np.frombuffer(frame, dtype=np.uint8) - frame = frame.reshape(size[1], size[0], 3) - result = face_finder.find_face(frame) - - if result is not None: - face_infos[idx] = result - - # Build DataFrame - records = [] - for idx, info in face_infos.items(): - info["frame_idx"] = idx - records.append(info) - return pd.DataFrame(records) - - -# get_face_info 와 기능은 동일하지만, 메모리 사용을 줄인 버전 -def get_face_info2(path, ebd_아나운서, stride=1, sim_th=0.7, callback=None, verbose=False): - if verbose: - print("get_face_info2") - # if stride==1: - # df_face_info = calc_ebds_from_images_move(path, stride=stride, callback=callback, verbose=verbose) - # else: - df_face_info = calc_ebds_from_images2( - path, stride=stride, callback=callback, verbose=verbose - ) - df_face_info = df_face_info.dropna(axis=0) - - calc_sim = lambda ebd: (ebd_아나운서 * ebd).sum().item() - df_face_info["similaraty"] = df_face_info["ebd"].map(calc_sim) - df_face_info = df_face_info.sort_values(["frame_idx", "similaraty"]) - - # 유사도 기반으로 아나운서 얼굴만 필터(실제로는 먼저 가장 유사한 얼굴만 골라내기) - return df_face_info, get_filtered_face(df_face_info, sim_th) - - -# save_face_info2 와 기능은 동일하나, -# 메모리 적게 사용하도록 개선한 버전 -def save_face_info3(mp4_path, ebd_아나운서, move, base="./", callback=None, verbose=False): - df_face_info_path = os.path.join( - base, "df_face_info", f"{str(Path(mp4_path).stem)}.pickle" - ) - if verbose: - print("save_face_info3 - df_face_info: ", str(df_face_info_path)) - callback1 = callback_inter( - callback, min_per=0, max_per=90, desc="save_face_info3 - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=90, max_per=100, desc="save_face_info3 - 2", verbose=verbose - ) - - fps = video_meta(mp4_path)["fps"] - if move: - stride = 1 - else: - stride = round(fps) * 1 - if not Path(df_face_info_path).exists(): - r = get_face_info2( - mp4_path, ebd_아나운서, stride=stride, callback=callback1, verbose=verbose - ) - df_face_info, df_아나운서_only = r - os.makedirs(os.path.dirname(df_face_info_path), exist_ok=True) - df_face_info.to_pickle(df_face_info_path) - - dst = Path(base) / "df_anchor_i" / 
f"{Path(df_face_info_path).stem}_000.pickle" - if verbose: - print("df_anchor_i:", str(dst)) - if not Path(dst).exists(): - os.makedirs(os.path.dirname(dst), exist_ok=True) - df = pd.read_pickle(df_face_info_path) - df_ = df.sort_values("similaraty", ascending=False).drop_duplicates( - ["frame_idx"] - ) - df_ = df_.query("similaraty >= 0.3") - # display(df_.groupby('frame_idx').count()) - # pdb.set_trace() - df_face_info = face_info_to_anchor(df_, stride=stride, val_end=None) - df_face_info.to_pickle(dst) - return [dst] - callback2(100) - return [dst] diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/make_mels-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/make_mels-checkpoint.py deleted file mode 100644 index 59f38feb5220003ac76189a28fac95660308e57b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/.ipynb_checkpoints/make_mels-checkpoint.py +++ /dev/null @@ -1,99 +0,0 @@ -import librosa -import librosa.filters -import numpy as np -from addict import Dict -from scipy import signal - -# Default hyperparameters -hp = Dict( - num_mels=96, # Number of mel-spectrogram channels and local conditioning dimensionality - n_fft=800, # Extra window size is filled with 0 paddings to match this parameter - hop_size=200, # For 16000Hz, 200 = 12.5 ms (0.0125 * sample_rate) - win_size=800, # For 16000Hz, 800 = 50 ms (If None, win_size = n_fft) (0.05 * sample_rate) - sample_rate=16000, # 16000Hz (corresponding to librispeech) (sox --i ) - # Contribution by @begeekmyfriend - # Spectrogram Pre-Emphasis (Lfilter: Reduce spectrogram noise and helps model certitude - # levels. Also allows for better G&L phase reconstruction) - preemphasis=0.97, # filter coefficient. - # for normalization - max_abs_value=4.0, # max absolute value of data. - # If symmetric, data will be [-max, max] else [0, max] - # (Must not be too big to avoid gradient explosion, - # not too small for fast convergence) - # Limits - min_level_db=-100, - ref_level_db=20, - fmin=55, # Set this to 55 if your speaker is male! if female, 95 should help taking off noise. - # (To test depending on dataset. Pitch info: male~[65, 260], female~[100, 525]) - fmax=7600, # To be increased/reduced depending on data. 
-) - - -def load_wav(path, sr=hp.sample_rate): - return librosa.core.load(path, sr=sr)[0] - - -def melspectrogram(wav): - D = _stft(preemphasis(wav, hp.preemphasis)) - S = _amp_to_db(_linear_to_mel(np.abs(D))) - hp.ref_level_db - return _normalize(S) - - -def _stft(y): - return librosa.stft( - y=y, n_fft=hp.n_fft, hop_length=hp.hop_size, win_length=hp.win_size - ) - - -def _amp_to_db(x): - min_level = np.exp(hp.min_level_db / 20 * np.log(10)) - return 20 * np.log10(np.maximum(min_level, x)) - - -def preemphasis(wav, k): - return signal.lfilter([1, -k], [1], wav) - - -def _normalize(S): - return np.clip( - (2 * hp.max_abs_value) * ((S - hp.min_level_db) / (-hp.min_level_db)) - - hp.max_abs_value, - -hp.max_abs_value, - hp.max_abs_value, - ) - - -def _build_mel_basis(): - assert hp.fmax <= hp.sample_rate // 2 - return librosa.filters.mel( - hp.sample_rate, hp.n_fft, n_mels=hp.num_mels, fmin=hp.fmin, fmax=hp.fmax - ) - - -def _linear_to_mel(spectogram): - return np.dot(_mel_basis, spectogram) - - -_mel_basis = _build_mel_basis() - - -################################################################# -from pathlib import Path - - -def load_wav_to_mels(wav_path, wav_loaded=None): - assert wav_path is not None or wav_loaded is not None - if wav_path is not None: - wav = load_wav(wav_path) - else: - wav = wav_loaded - spec = melspectrogram(wav) - return spec - - -def save_mels(wav_path): - spec = load_wav_to_mels(wav_path) - mels_path = Path(wav_path).parent / "mels" - - np.savez_compressed(mels_path, spec=spec) - return str(mels_path) + ".npz" diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__init__.py b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/__init__.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 888d969cc6ee6f90662b68660f8fbeeb65c489ce..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/crop_with_fan.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/crop_with_fan.cpython-310.pyc deleted file mode 100644 index c3c4e6ecd1bba128c5b861b41dc9abc77c0fb7b4..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/crop_with_fan.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/face_finder.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/face_finder.cpython-310.pyc deleted file mode 100644 index d3347f23587ad0715d3635af955d2b49a359b3d5..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/__pycache__/face_finder.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/crop_with_fan.py b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/crop_with_fan.py deleted file mode 100644 index 
eb9ea5dd761fb0fb25879a9f48fdcc4f2fc9cc4f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/crop_with_fan.py +++ /dev/null @@ -1,1081 +0,0 @@ -import math -import os -import pdb -from glob import glob -from pathlib import Path - -import cv2 -import face_alignment -import imageio -import imageio_ffmpeg -import numpy as np -import pandas as pd -import torch -from moviepy.editor import AudioFileClip, ImageSequenceClip -from scipy import stats -from tqdm.auto import tqdm - -from stf_alternative.util import ( - callback_inter, - get_four_channel_ffmpeg_reader, - get_three_channel_ffmpeg_reader, -) - -from . import face_finder as ff - -g_detector_fan = None -g_detector_fan3d = None - - -def init_fan(device="cuda:0"): - global g_detector_fan - global g_detector_fan3d - if g_detector_fan is None: - try: - g_detector_fan = face_alignment.FaceAlignment( - face_alignment.LandmarksType._2D, flip_input=False, device=device - ) - except AttributeError: - g_detector_fan = face_alignment.FaceAlignment( - face_alignment.LandmarksType.TWO_D, flip_input=False, device=device - ) - if g_detector_fan3d is None: - try: - g_detector_fan3d = face_alignment.FaceAlignment( - face_alignment.LandmarksType._3D, flip_input=False, device=device - ) - except AttributeError: - g_detector_fan3d = face_alignment.FaceAlignment( - face_alignment.LandmarksType.THREE_D, flip_input=False, device=device - ) - - -def del_fan(): - global g_detector_fan - global g_detector_fan3d - if g_detector_fan is not None: - del g_detector_fan - g_detector_fan = None - - if g_detector_fan3d is None: - del g_detector_fan3d - g_detector_fan3d = None - torch.cuda.empty_cache() - - -def fan_box(pred, img, type3d): - if type3d: - xlist, ylist, _ = zip(*pred) - else: - xlist, ylist = zip(*pred) - xlist = [int(round(x)) for x in xlist] - ylist = [int(round(x)) for x in ylist] - y1, y2, x1, x2 = [min(ylist), max(ylist), min(xlist), max(xlist)] - size = max(y2 - y1 + 1, x2 - x1 + 1) - size = int(round(size)) - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - x1, y1 = int(round(cx - size / 2)), int(round(cy - size / 2)) - x2, y2 = x1 + size - 1, y1 + size - 1 - - y1 = max(0, y1) - y2 = min(img.shape[0], y2) - x1 = max(0, x1) - x2 = min(img.shape[1], x2) - return (x1, y1, x2, y2) - - -def face_detect_fan_(img, type3d): - global g_detector_fan - global g_detector_fan3d - - # snow : init_fan 을 미리 불러주지 않았으면 여기서 불리도록한다. - init_fan() - - if type3d: - preds = g_detector_fan3d.get_landmarks(img) - else: - preds = g_detector_fan.get_landmarks(img) - - preds = [(fan_box(p, img, type3d), p) for p in preds] - preds = [((b[2] - b[0]) * (b[3] - b[1]), b, p) for b, p in preds] - preds = sorted(preds) - area, (x1, y1, x2, y2), pred = preds[-1] - return np.round((pred)).astype(np.int32), np.array([x1, y1, x2, y2]) - - -def face_detect_fan(img, type3d=False): - # snow : 인사하는 템플릿이 나와서 실제 얼굴이 없는 영역이 나옴에 따라 얼굴없는 경우 처리를 다시 넣어준다. - try: - return face_detect_fan_(img, type3d) - except: - return None, None - - -def get_anchor_box(df_anchor, offset_y, margin, size_stride=32, verbose=False): - # 면적 평균을 구하고 너무(?) 
작거나 큰 얼굴은 제거 - # desc = df_anchor['area'].describe() - # area_25, area_75 = desc['25%'], desc['75%'] - # df_anchor = df_anchor.query('@area_25 < area and area < @area_75') - - # z score로 아웃라이어 제거하고 평균 박스 구하기 - boxes = np.array([v for v in df_anchor["box"].values.tolist() if v is not None]) - center_xs = boxes[:, [0, 2]].mean(axis=1) - center_ys = boxes[:, [1, 3]].mean(axis=1) - size_xs = boxes[:, 2] - boxes[:, 0] - size_ys = boxes[:, 3] - boxes[:, 1] - - ####################################### - # 박스가 하나 뿐이면 죽는 문제 수정. - # xs 혹은 ys 의 값이 모두 같은 값이어서 z 가 nan이되서 죽는 문제 수정 - if len(center_xs) > 1: - center_x = ( - np.mean( - [ - x - for z, x in zip(stats.zscore(center_xs), center_xs) - if abs(z) < 3 or math.isnan(z) - ] - ) - .round() - .astype(np.int32) - ) - else: - center_x = np.mean(center_xs).round().astype(np.int32) - if len(center_ys) > 1: - center_y = np.mean( - [ - y - for z, y in zip(stats.zscore(center_ys), center_ys) - if abs(z) < 3 or math.isnan(z) - ] - ) - else: - center_y = np.mean(center_ys).round().astype(np.int32) - center_y = int(round(center_y * (1 + offset_y))) - if len(size_xs) > 1: - size_x = ( - np.mean( - [ - x - for z, x in zip(stats.zscore(size_xs), size_xs) - if abs(z) < 3 or math.isnan(z) - ] - ) - .round() - .astype(np.int32) - ) - else: - size_x = np.mean(size_xs).round().astype(np.int32) - if len(size_ys) > 1: - size_y = ( - np.mean( - [ - y - for z, y in zip(stats.zscore(size_ys), size_ys) - if abs(z) < 3 or math.isnan(z) - ] - ) - .round() - .astype(np.int32) - ) - else: - size_y = np.mean(size_ys).round().astype(np.int32) - - # center_x = np.mean([x for z, x in zip(stats.zscore(center_xs), center_xs) if abs(z) < 3]).round().astype(np.int32) - # center_y = np.mean([y for z, y in zip(stats.zscore(center_ys), center_ys) if abs(z) < 3]) - # center_y = int(round(center_y*(1+offset_y))) - # size_x = np.mean([x for z, x in zip(stats.zscore(size_xs), size_xs) if abs(z) < 3]).round().astype(np.int32) - # size_y = np.mean([y for z, y in zip(stats.zscore(size_ys), size_ys) if abs(z) < 3]).round().astype(np.int32) - ####################################### - SS = size_stride - size_step_x = int(math.ceil((size_x * (1 + margin)) / SS) * SS) - size_step_y = int(math.ceil((size_y * (1 + margin)) / SS) * SS) - - x1 = center_x - int(size_step_x * 0.5) - y1 = center_y - int(size_step_y * 0.5) - - y1 = max(0, y1) - - mean_box = [x1, y1, x1 + size_step_x - 1, y1 + size_step_y - 1] - if verbose: - print("mean_box:", mean_box, " width:", size_step_x, " height:", size_step_y) - return mean_box - - -def df_fan_info(frames, box, verbose=False): - x1, y1, x2, y2 = box - - def fan_info(f): - face = f[y1 : y2 + 2, x1 : x2 + 1] - pts2d, box = face_detect_fan(face) - # pts3d, _ = face_detect_fan(face, type3d=True) - pts3d = None - return box, pts2d, pts3d - - def to_full(box, pts2d, pts3d, x1y1): - if box is not None: - box = (box.reshape(-1, 2) + x1y1).reshape(-1) - if pts2d is not None: - pts2d = pts2d + x1y1 - if pts3d is not None: - pts3d = pts3d + (x1y1 + (0,)) - return box, pts2d, pts3d - - fi = [ - fan_info(frames[idx]) - for idx in tqdm(frames, desc="■ fan ", disable=not verbose) - ] - fi = [to_full(*info, (x1, y1)) for info in fi] - - df = pd.DataFrame(fi, columns=["box", "pts2d", "pts3d"]) - df["frame_idx"] = list(frames.keys()) - return df - - -def crop(frames, df_fan, offset_y, margin): - df_fan = df_fan.copy() - - # ToDo: None을 제거해야 됨. 
crash 발생 - pts2ds = [e for e in df_fan["pts2d"].values if e is not None] - if len(pts2ds): - pts2ds = np.stack(pts2ds) - x1, y1 = pts2ds[:, :, 0].min(), pts2ds[:, :, 1].min() - x2, y2 = pts2ds[:, :, 0].max(), pts2ds[:, :, 1].max() - else: - return None, None - - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - sx, sy = (x2 - x1 + 1) * (1 + margin), (y2 - y1 + 1) * (1 + margin) - x1, y1 = cx - sx / 2, cy - sy / 2 - x2, y2 = cx + sx / 2, cy + sy / 2 - - size = x2 - x1 + 1 - offset_y = int(round(size * offset_y)) - y1 = y1 + offset_y - y2 = y1 + size - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - # print((x1, y1, x2, y2), ((x2-x1+1), (y2-y1+1))) - - # TODO snow: 박스가 이미지를 넘어서는 경우에 대한 방어코드 - # 방어코드를 넣긴했는데, 이렇게 되면 얼굴이 찌그러져서 학습이된다. - # 추후 고민해봐야 한다. - frame_shape = frames[0].shape - x1, y1, x2, y2 = ( - max(0, x1), - max(0, y1), - min(x2, frame_shape[1] - 1), - min(y2, frame_shape[0] - 1), - ) - - cropped_frames = {} - cropped_pts2ds = [] - frame_idxs_ = [] - for _, pts2d, _, frame_idx in df_fan.values: - f = frames[frame_idx] - if pts2d is not None: - cropped_pts2ds.append(pts2d - (x1, y1)) - else: - cropped_pts2ds.append(None) - frame_idxs_.append(frame_idx) - cropped_frames[frame_idx] = f[y1 : y2 + 1, x1 : x2 + 1].copy() - df_fan["cropped_pts2d"] = cropped_pts2ds - df_fan["cropped_box"] = [np.array([x1, y1, x2, y2])] * len(df_fan) - df_fan["cropped_size"] = size - return df_fan, cropped_frames - - -def save_debug_audio(mp4_path, min_idx, max_idx, audio_path): - ac = AudioFileClip(mp4_path) - meta = ff.video_meta(mp4_path) - s, e = min_idx / meta["nframes"], (max_idx + 1) / meta["nframes"] - s, e = s * meta["duration"], e * meta["duration"] - ac = ac.subclip(s, e) - ac.write_audiofile(audio_path, logger=None) - - -def save_audio(mp4_path, audio_path): - ac = AudioFileClip(mp4_path) - ac.write_audiofile(audio_path, logger=None) - - -# snow : 사용하지 않는 코드 일단 주석처리 -# def save_crop_info(anchor_box_path, mp4_path, out_dir, make_mp4=False, -# crop_offset_y = -0.1, crop_margin=0.4, verbose=False): -# df_anchor_i = pd.read_pickle(anchor_box_path) -# -# # 얼굴이 모두 들어가는 박스 크기를 구한다. -# # 여기서 구한 박스에서만 fan 이 얼굴과 피처 포인트를 구한다. -# box = get_anchor_box(df_anchor_i, offset_y=0, margin=1.0) -# -# min_idx, max_idx = df_anchor_i['frame_idx'].values[[0, -1]] -# -# clip_dir = Path(out_dir)/Path(anchor_box_path).stem -# Path(clip_dir).mkdir(exist_ok=True, parents=True) -# -# try: -# save_audio(mp4_path, f'{clip_dir}/audio.wav') -# save_debug_audio(mp4_path, min_idx, max_idx, f'{clip_dir}/audio_debug.wav') -# except: -# # inference 때는 음성 없는 비디오가 들어온다. -# pass -# -# pickle_path = f'{clip_dir}/df_fan.pickle' -# if Path(pickle_path).exists(): -# return pickle_path -# -# frames = ff.extract_frame(mp4_path, min_idx, max_idx+1) -# -# # FAN 이 얼굴과 피처 포인트를 구한다. -# df = df_fan_info(frames, box, verbose=verbose) -# -# # 모델에 입력할 박스를 다시 구해서 crop 한다. -# # crop 박스 영역은 피쳐 포인트 기반으로 구한다. 
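The `crop` routine above derives its crop window from the landmark extents over the whole clip: a tight box around all feature points, grown by `margin`, forced square, shifted vertically by `offset_y`, then clamped to the frame. A standalone sketch of that geometry (`landmark_crop_box` is a hypothetical name; the landmark values are fabricated for the demo):

```python
import numpy as np

def landmark_crop_box(pts2d_stack, frame_w, frame_h, offset_y=-0.1, margin=0.4):
    """Square crop box around all landmarks, grown by `margin` and shifted by `offset_y`.

    pts2d_stack: (n_frames, n_points, 2) array of landmark coordinates.
    """
    x1, y1 = pts2d_stack[:, :, 0].min(), pts2d_stack[:, :, 1].min()
    x2, y2 = pts2d_stack[:, :, 0].max(), pts2d_stack[:, :, 1].max()

    cx, cy = (x1 + x2) / 2, (y1 + y2) / 2
    sx = (x2 - x1 + 1) * (1 + margin)         # grow the tight box by the margin
    sy = (y2 - y1 + 1) * (1 + margin)
    x1, y1 = cx - sx / 2, cy - sy / 2
    x2, y2 = cx + sx / 2, cy + sy / 2

    size = x2 - x1 + 1                         # force a square box, shifted by offset_y
    y1 = y1 + round(size * offset_y)
    y2 = y1 + size
    x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32)

    # Clamp to the frame so the crop never leaves the image.
    return max(0, x1), max(0, y1), min(x2, frame_w - 1), min(y2, frame_h - 1)

# Two frames of fake landmarks, just to exercise the function.
pts = np.array([[[300, 400], [360, 480]], [[310, 410], [350, 470]]], dtype=np.float32)
print(landmark_crop_box(pts, frame_w=1280, frame_h=720))
```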
-# df, cropped_frames = crop(frames, df, -# offset_y=crop_offset_y, -# margin=crop_margin) -# if df is None: -# return None -# -# for (idx1, pts), (idx2, im) in zip( -# df[['frame_idx','cropped_pts2d']].values, -# cropped_frames.items()): -# assert idx1 == idx2 -# name = f"""{idx1:05d}_{'yes' if pts is not None else 'no'}.jpg""" -# cv2.imwrite(str(Path(clip_dir)/str(name)), im[:,:,[2,1,0]], [int(cv2.IMWRITE_JPEG_QUALITY), 100]) -# df.to_pickle(pickle_path) -# with open(pickle_path.replace('.pickle', '.txt'), 'w') as f: -# f.write('success') -# -# if make_mp4: -# meta = ff.video_meta(mp4_path) -# debug_clip_path = save_debug_clip(clip_dir, meta['fps']) -# print('saved debug_mp4:', debug_clip_path ) -# -# return pickle_path - - -def save_debug_clip(clip, fps): - jpgs = glob(f"{clip}/*.png") - jpgs = sorted([(int(Path(e).stem.split("_")[0]), imageio.imread(e)) for e in jpgs]) - - fan_pts = pd.read_pickle(Path(clip) / "df_fan.pickle") - fan_pts = fan_pts.set_index("frame_idx")["cropped_pts2d"] - - def draw_pts(im, pts): - im = im.copy() - if pts is not None: - for x, y in pts: - cv2.circle(im, (x, y), radius=1, color=(0, 255, 0)) - return im - - marked = [draw_pts(im, fan_pts[idx]) for idx, im in jpgs] - merged = [np.concatenate([im, m], axis=1) for (idx, im), m in zip(jpgs, marked)] - - sz = merged[0].shape[0] - pw = (sz + 1) // 2 * 2 - sz - merged = [ - np.pad(im, ((0, pw), (0, 0), (0, 0)), mode="constant", constant_values=128) - for im in merged - ] - - audio_clip = AudioFileClip(f"{clip}/audio_debug.wav") - - clip_debug = ImageSequenceClip(merged, fps) - - clip_debug = clip_debug.set_audio(audio_clip) - - save_path = f"{clip}/debug.mp4" - clip_debug.write_videofile(save_path, logger=None) - return save_path - - -def inter_alg(target_size, img): - if isinstance(target_size, tuple): - w, h = target_size - else: - w, h = target_size, target_size - return inter_alg_(w, h, img) - - -def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - -def crop_and_save( - image_size, path, df_fan, offset_y, margin, clip_dir, callback, verbose=False -): - df_fan = df_fan.copy() - - # ToDo: None을 제거해야 됨. crash 발생 - pts2ds = [e for e in df_fan["pts2d"].values if e is not None] - if len(pts2ds): - pts2ds = np.stack(pts2ds) - x1, y1 = pts2ds[:, :, 0].min(), pts2ds[:, :, 1].min() - x2, y2 = pts2ds[:, :, 0].max(), pts2ds[:, :, 1].max() - else: - return None, None - - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - sx, sy = (x2 - x1 + 1) * (1 + margin), (y2 - y1 + 1) * (1 + margin) - x1, y1 = cx - sx / 2, cy - sy / 2 - x2, y2 = cx + sx / 2, cy + sy / 2 - - size = x2 - x1 + 1 - offset_y = int(round(size * offset_y)) - y1 = y1 + offset_y - y2 = y1 + size - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - # print((x1, y1, x2, y2), ((x2-x1+1), (y2-y1+1))) - if path.endswith(".mov") or path.endswith(".webm"): - reader, meta = get_four_channel_ffmpeg_reader(path) - channel_size = 4 - else: # mp4 - reader, meta = get_three_channel_ffmpeg_reader(path) - channel_size = 3 - frame_size = meta["size"] - - # TODO snow: 박스가 이미지를 넘어서는 경우에 대한 방어코드 - # 방어코드를 넣긴했는데, 이렇게 되면 얼굴이 찌그러져서 학습이된다. - # 추후 고민해봐야 한다. 
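`inter_alg_` picks `cv2.INTER_AREA` when the target is smaller than the source (better anti-aliasing when shrinking) and `cv2.INTER_CUBIC` when enlarging. A small usage sketch; the flag is passed via the `interpolation=` keyword because the third positional argument of `cv2.resize` is `dst`, not the interpolation mode:

```python
import cv2
import numpy as np

def pick_interpolation(target_w, target_h, img):
    """INTER_AREA when shrinking (anti-aliases), INTER_CUBIC when enlarging (sharper upsampling)."""
    if target_w * target_h < img.shape[0] * img.shape[1]:
        return cv2.INTER_AREA
    return cv2.INTER_CUBIC

img = np.zeros((480, 640, 3), np.uint8)   # dummy frame
small = cv2.resize(img, (320, 240), interpolation=pick_interpolation(320, 240, img))
large = cv2.resize(img, (1280, 960), interpolation=pick_interpolation(1280, 960, img))
print(small.shape, large.shape)           # (240, 320, 3) (960, 1280, 3)
```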
- x1, y1, x2, y2 = ( - max(0, x1), - max(0, y1), - min(x2, frame_size[0] - 1), - min(y2, frame_size[1] - 1), - ) - - cropped_pts2ds = [] - for pts2d, frame_idx, f in tqdm( - zip(df_fan["pts2d"].values, df_fan["frame_idx"].values, reader), - total=len(df_fan), - desc="crop_and_save", - disable=not verbose, - ): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(frame_size[1], frame_size[0], channel_size) - if pts2d is not None: - pts2d_resized = pts2d - (x1, y1) - if image_size is not None: - # Updating the scale for x and y with the new image size - scale_x = image_size / (x2 - x1 + 1) - scale_y = image_size / (y2 - y1 + 1) - pts2d_resized[:, 0] = ( - pts2d_resized[:, 0] * scale_x - ) # Scale x-coordinate - pts2d_resized[:, 1] = ( - pts2d_resized[:, 1] * scale_y - ) # Scale y-coordinate - cropped_pts2ds.append(pts2d_resized) - else: - cropped_pts2ds.append(None) - - cropped_frame = f[y1 : y2 + 1, x1 : x2 + 1].copy() - - h, w = cropped_frame.shape[:2] - if image_size is not None: - cropped_frame = resize_adapt(image_size, cropped_frame) - - if channel_size == 3: - name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.jpg""" - cv2.imwrite( - str(Path(clip_dir) / str(name)), - cropped_frame[:, :, [2, 1, 0]], - [int(cv2.IMWRITE_JPEG_QUALITY), 100], - ) - else: - name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.png""" - cv2.imwrite( - str(Path(clip_dir) / str(name)), - cropped_frame[:, :, [2, 1, 0, 3]], - [int(cv2.IMWRITE_PNG_COMPRESSION), 0], - ) - callback((frame_idx + 1) / len(df_fan) * 100) - - df_fan["cropped_pts2d"] = cropped_pts2ds - df_fan["cropped_box"] = [np.array([x1, y1, x2, y2])] * len(df_fan) - df_fan["cropped_size"] = size - return df_fan - - -# df_fan_info 와 기능은 동일하고, 메모리 사용량만 줄임 -def df_fan_info2(path, box, callback=None, verbose=False): - callback1 = callback_inter( - callback, min_per=0, max_per=90, desc="df_fan_info2 - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=90, max_per=100, desc="df_fan_info2 - 2", verbose=verbose - ) - - x1, y1, x2, y2 = box - - def fan_info(f): - face = f[y1 : y2 + 2, x1 : x2 + 1] - pts2d, box = face_detect_fan(face) - # pts3d, _ = face_detect_fan(face, type3d=True) - pts3d = None - return box, pts2d, pts3d - - def to_full(box, pts2d, pts3d, x1y1): - if box is not None: - box = (box.reshape(-1, 2) + x1y1).reshape(-1) - if pts2d is not None: - pts2d = pts2d + x1y1 - if pts3d is not None: - pts3d = pts3d + (x1y1 + (0,)) - return box, pts2d, pts3d - - def __fan_info(f, size, idx, max_idx): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(size[1], size[0], 3) - # 진행상황 공유 - callback1((idx + 1) / max_idx * 100) - return fan_info(f) - - reader = imageio_ffmpeg.read_frames(str(path)) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - size = meta["size"] - - frame_cnt, _ = imageio_ffmpeg.count_frames_and_secs(str(path)) - fi = { - idx: __fan_info(frame, size, idx=idx, max_idx=frame_cnt) - for idx, frame in tqdm( - enumerate(reader), total=frame_cnt, desc="■ fan ", disable=not verbose - ) - } - fi = {idx: to_full(*info, (x1, y1)) for idx, info in fi.items()} - - df = pd.DataFrame(fi.values(), columns=["box", "pts2d", "pts3d"]) - df["frame_idx"] = list(fi.keys()) - callback2(100) - return df - - -def set_no_infer(df_anchor, frame_ranges, column_name): - """ - 주어진 데이터프레임의 특정 열 값을 frame_ranges 내의 인덱스에 대해 None으로 설정합니다. - - Parameters: - df_anchor (pandas.DataFrame): 입력 데이터프레임. 'frame_idx' 열이 포함되어야 합니다. - frame_ranges (list of tuple): 시작 인덱스와 종료 인덱스를 포함하는 튜플의 리스트. 
- 종료 인덱스는 범위에 포함되지 않습니다. - column_name (str): 업데이트할 열의 이름. 기본값은 'box'. - - Returns: - pandas.DataFrame: 특정 열 값이 업데이트된 데이터프레임 - """ - - if frame_ranges is None: - return df_anchor - - df_anchor = df_anchor.set_index("frame_idx") - - for s, e in frame_ranges: - # 인덱스 범위를 검사하고 필요한 경우 조정 - s = max(s, df_anchor.index.min()) - e = min(e, df_anchor.index.max() + 1) - - # 조정된 범위 내에 있는 인덱스만 선택하여 'box' 열 값을 None으로 설정 - df_anchor.loc[s : e - 1, column_name] = None - - df_anchor = df_anchor.reset_index() - return df_anchor - - -# save_crop_info 와 기능은 동일하고, 메모리 사용량을 줄인 것 -def save_crop_info2( - image_size, - anchor_box_path, - mp4_path, - out_dir, - make_mp4=False, - crop_offset_y=-0.1, - crop_margin=0.4, - no_infer_frames=None, - callback=None, - verbose=False, -): # , is_webm=False): - callback1 = callback_inter( - callback, min_per=0, max_per=5, desc="save_crop_info2 - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=5, max_per=70, desc="save_crop_info2 - 2", verbose=verbose - ) - callback3 = callback_inter( - callback, min_per=70, max_per=100, desc="save_crop_info2 - 3", verbose=verbose - ) - - df_anchor_i = pd.read_pickle(anchor_box_path) - df_anchor_i = set_no_infer(df_anchor_i, no_infer_frames, "box") - - # 얼굴이 모두 들어가는 박스 크기를 구한다. - # 여기서 구한 박스에서만 fan 이 얼굴과 피처 포인트를 구한다. - box = get_anchor_box(df_anchor_i, offset_y=0, margin=1.0) - - min_idx, max_idx = df_anchor_i["frame_idx"].values[[0, -1]] - - clip_dir = Path(out_dir) / Path(anchor_box_path).stem - Path(clip_dir).mkdir(exist_ok=True, parents=True) - - try: - save_audio(mp4_path, f"{clip_dir}/audio.wav") - save_debug_audio(mp4_path, min_idx, max_idx, f"{clip_dir}/audio_debug.wav") - except: - # inference 때는 음성 없는 비디오가 들어온다. - pass - - pickle_path = f"{clip_dir}/df_fan.pickle" - if Path(pickle_path).exists(): - return pickle_path - - callback1(100) - - # FAN 이 얼굴과 피처 포인트를 구한다. - df = df_fan_info2(mp4_path, box, callback2, verbose=verbose) - df = set_no_infer(df, no_infer_frames, "pts2d") - - # 모델에 입력할 박스를 다시 구해서 crop 한다. - # crop 박스 영역은 피쳐 포인트 기반으로 구한다. - df = crop_and_save( - image_size, - mp4_path, - df, - offset_y=crop_offset_y, - margin=crop_margin, - clip_dir=clip_dir, - callback=callback3, - verbose=verbose, - ) - if df is None: - return None - df.to_pickle(pickle_path) - - with open(pickle_path.replace(".pickle", ".txt"), "w") as f: - f.write("success") - - if make_mp4: - meta = ff.video_meta(mp4_path) - debug_clip_path = save_debug_clip(clip_dir, meta["fps"]) - print("saved debug_mp4:", debug_clip_path) - - return pickle_path - - -# snow : 사용하지 않는 코드 일단 주석처리 -# # save_crop_info2 와 차이점 : 이미지를 resize해서 저장한다. -# def save_crop_info3(anchor_box_path, mp4_path, out_dir, img_size, make_mp4=False, -# crop_offset_y = -0.1, crop_margin=0.4, callback=None, verbose=False): -# -# callback1 = callback_inter(callback, min_per=0, max_per=5, desc='save_crop_info2 - 1', verbose=verbose) -# callback2 = callback_inter(callback, min_per=5, max_per=70, desc='save_crop_info2 - 2', verbose=verbose) -# callback3 = callback_inter(callback, min_per=70, max_per=100, desc='save_crop_info2 - 3', verbose=verbose) -# -# df_anchor_i = pd.read_pickle(anchor_box_path) -# -# # 얼굴이 모두 들어가는 박스 크기를 구한다. -# # 여기서 구한 박스에서만 fan 이 얼굴과 피처 포인트를 구한다. 
-# box = get_anchor_box(df_anchor_i, offset_y=0, margin=1.0) -# -# min_idx, max_idx = df_anchor_i['frame_idx'].values[[0, -1]] -# -# clip_dir = Path(out_dir)/Path(anchor_box_path).stem -# Path(clip_dir).mkdir(exist_ok=True, parents=True) -# -# try: -# save_audio(mp4_path, f'{clip_dir}/audio.wav') -# save_debug_audio(mp4_path, min_idx, max_idx, f'{clip_dir}/audio_debug.wav') -# except: -# # inference 때는 음성 없는 비디오가 들어온다. -# pass -# -# pickle_path = f'{clip_dir}/df_fan.pickle' -# if Path(pickle_path).exists(): -# return pickle_path -# -# callback1(100) -# -# # FAN 이 얼굴과 피처 포인트를 구한다. -# df = df_fan_info2(mp4_path, box, callback2, verbose=verbose) -# -# # 모델에 입력할 박스를 다시 구해서 crop 한다. -# # crop 박스 영역은 피쳐 포인트 기반으로 구한다. -# df = crop_and_save(mp4_path, df, -# offset_y=crop_offset_y, -# margin=crop_margin, -# clip_dir=clip_dir, -# callback=callback3, -# verbose=verbose) -# -# if df is None: -# return None -# -# resize_for_model(img_size, clip_dir, verbose=verbose) -# -# df.to_pickle(pickle_path) -# with open(pickle_path.replace('.pickle', '.txt'), 'w') as f: -# f.write('success') -# -# if make_mp4: -# meta = ff.video_meta(mp4_path) -# debug_clip_path = save_debug_clip(clip_dir, meta['fps']) -# print('saved debug_mp4:', debug_clip_path ) -# -# return pickle_path - - -def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - -def inter_alg(target_size, img): - if isinstance(target_size, tuple): - w, h = target_size - else: - w, h = target_size, target_size - return inter_alg_(w, h, img) - - -def resize_adapt(sz, img): - h, w, channel_size = img.shape - if h == sz and w == sz: - return img - board = np.full((sz, sz, channel_size), 128, np.uint8) - if True: - # if sz < max(h, w): - r = sz / max(h, w) - h, w = int(round(r * h)), int(round(r * w)) - img = cv2.resize(img, (w, h), inter_alg(sz, img)) - board[(sz - h) // 2 : (sz - h) // 2 + h, (sz - w) // 2 : (sz - w) // 2 + w] = img - return board - - -def read_pickle_preds(dir_name): - df = pd.read_pickle(dir_name / "df_fan.pickle") - preds = df.set_index("frame_idx")["cropped_pts2d"] - # g_cached_pickle[str(dir_name)] = preds - return preds - - -def masking(im, pts): - h, w = im.shape[:2] - im = cv2.fillPoly(im, [pts], (128, 128, 128)) - return im - - -# img_size : (w,h) -def resize_for_model(img_size, clip_dir, verbose=False): - assert type(img_size) == int - fs = glob(str(clip_dir) + "/*.jpg") - if verbose: - print("resize to:", img_size) - print("image len:", len(fs)) - print(str(clip_dir)) - - d = os.path.dirname(fs[0]) - resize_d = f"{d}.resized" - if verbose: - print(resize_d) - os.makedirs(resize_d, exist_ok=True) - for f in tqdm(fs, desc="■ resize ", disable=not verbose): - img = cv2.imread(str(f)) - img = resize_adapt(img_size, img) - f = os.path.basename(f) - cv2.imwrite(f"{resize_d}/{f}", img, [int(cv2.IMWRITE_JPEG_QUALITY), 100]) - - -def compute_max_size(df_fan, margin=0.9): - max_size = 0 - for box in df_fan["box"]: - if box is not None: - x1, y1, x2, y2 = box - box_width, box_height = x2 - x1 + 1, y2 - y1 + 1 - size = max(box_width, box_height) - size_with_margin = size * (1 + margin) - max_size = max(max_size, size_with_margin) - return max_size - - -def get_anchor_boxes_move(path, df_anchor_i, offset_y, margin): - reader = imageio_ffmpeg.read_frames(str(path)) - meta = reader.__next__() # meta data, e.g. 
meta["size"] -> (width, height) - frame_size = meta["size"] - - boxes = [] - max_size = compute_max_size(df_anchor_i, margin) - - for i, b in enumerate(df_anchor_i["box"]): - x1, y1, x2, y2 = b - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - - cy += offset_y * max_size # Shift the box downwards by offset_y * max_size - x1, y1 = cx - max_size / 2, cy - max_size / 2 - x2, y2 = cx + max_size / 2, cy + max_size / 2 - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - width = x2 - x1 - height = y2 - y1 - - if width > height: - y2 = y1 + width - elif height > width: - x2 = x1 + height - - x1, y1, x2, y2 = ( - max(0, x1), - max(0, y1), - min(x2, frame_size[0]), - min(y2, frame_size[1]), - ) - boxes.append([x1, y1, x2, y2]) - return boxes - - -def crop_and_save_move( - image_size, path, df_fan, offset_y, margin, clip_dir, callback, verbose=False -): - df_fan = df_fan.copy() - max_size = compute_max_size(df_fan, margin) - reader = imageio_ffmpeg.read_frames(str(path)) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - - if path.endswith(".mov") or path.endswith(".webm"): - reader, meta = get_four_channel_ffmpeg_reader(path) - channel_size = 4 - else: # mp4 - reader, meta = get_three_channel_ffmpeg_reader(path) - channel_size = 3 - frame_size = meta["size"] - - for b in df_fan["box"]: - try: - x1, y1, x2, y2 = b - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - - cy += offset_y * max_size # Shift the box downwards by offset_y * max_size - x1, y1 = cx - max_size / 2, cy - max_size / 2 - x2, y2 = cx + max_size / 2, cy + max_size / 2 - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - width = x2 - x1 - height = y2 - y1 - - if width > height: - y2 = y1 + width - elif height > width: - x2 = x1 + height - break - except: - pass - cropped_boxes, cropped_pts2ds, cropped_size = [], [], [] - - for (box, pts2d, _, frame_idx), f in tqdm( - zip(df_fan.values, reader), - total=len(df_fan), - desc="crop_and_save_move", - disable=not verbose, - ): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(frame_size[1], frame_size[0], channel_size) - - if box is not None: - x1, y1, x2, y2 = box - cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 - - cy += offset_y * max_size # Shift the box downwards by offset_y * max_size - x1, y1 = cx - max_size / 2, cy - max_size / 2 - x2, y2 = cx + max_size / 2, cy + max_size / 2 - x1, y1, x2, y2 = np.array([x1, y1, x2, y2]).round().astype(np.int32) - - width = x2 - x1 - height = y2 - y1 - - if width > height: - y2 = y1 + width - elif height > width: - x2 = x1 + height - - cropped_boxes.append([x1, y1, x2, y2]) - cropped_size.append([x2 - x1 + 1]) - if pts2d is not None: - pts2d_resized = pts2d - (x1, y1) - if image_size is not None: - # Updating the scale for x and y with the new image size - # scale = image_size / max_size # Use the same scale for both x and y - # pts2d_resized = pts2d_resized * scale - scale_x = image_size / (x2 - x1 + 1) - scale_y = image_size / (y2 - y1 + 1) - pts2d_resized[:, 0] = ( - pts2d_resized[:, 0] * scale_x - ) # Scale x-coordinate - pts2d_resized[:, 1] = ( - pts2d_resized[:, 1] * scale_y - ) # Scale y-coordinate - cropped_pts2ds.append(pts2d_resized) - else: - cropped_pts2ds.append(None) - - else: - cropped_size.append(None) - cropped_boxes.append(None) - cropped_pts2ds.append(None) - - cropped_frame = f[y1 : y2 + 1, x1 : x2 + 1].copy() - - if image_size is not None: - cropped_frame = resize_adapt(image_size, cropped_frame) - - # name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.jpg""" - # 
cv2.imwrite(str(Path(clip_dir)/str(name)), cropped_frame[:,:,[2,1,0]], [int(cv2.IMWRITE_JPEG_QUALITY), 100]) - if channel_size == 3: - name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.jpg""" - cv2.imwrite( - str(Path(clip_dir) / str(name)), - cropped_frame[:, :, [2, 1, 0]], - [int(cv2.IMWRITE_JPEG_QUALITY), 100], - ) - else: - name = f"""{frame_idx:05d}_{'yes' if pts2d is not None else 'no'}.png""" - cv2.imwrite( - str(Path(clip_dir) / str(name)), - cropped_frame[:, :, [2, 1, 0, 3]], - [int(cv2.IMWRITE_PNG_COMPRESSION), 0], - ) - - callback((frame_idx + 1) / len(df_fan) * 100) - - df_fan["cropped_pts2d"] = cropped_pts2ds - df_fan["cropped_box"] = cropped_boxes - df_fan["cropped_size"] = cropped_size - - return df_fan - - -def df_fan_info_move(path, boxes, callback=None, verbose=False): - callback1 = callback_inter( - callback, min_per=0, max_per=90, desc="df_fan_info_move - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=90, max_per=100, desc="df_fan_info_move - 2", verbose=verbose - ) - - def fan_info(f, box): - x1, y1, x2, y2 = box - face = f[y1 : y2 + 2, x1 : x2 + 1] - pts2d, box = face_detect_fan(face) - pts3d = None - return box, pts2d, pts3d - - def to_full(box, pts2d, pts3d, x1y1): - if box is not None: - box = (box.reshape(-1, 2) + x1y1).reshape(-1) - if pts2d is not None: - pts2d = pts2d + x1y1 - if pts3d is not None: - pts3d = pts3d + (x1y1 + (0,)) - return box, pts2d, pts3d - - def __fan_info(f, size, idx, max_idx): - f = np.frombuffer(f, dtype=np.uint8) - f = f.reshape(size[1], size[0], 3) - box = boxes[idx] - callback1((idx + 1) / max_idx * 100) - # Run FAN on the cropped face - return fan_info(f, box) - - reader = imageio_ffmpeg.read_frames(str(path)) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - size = meta["size"] - - frame_cnt, _ = imageio_ffmpeg.count_frames_and_secs(str(path)) - fi = { - idx: __fan_info(frame, size, idx=idx, max_idx=frame_cnt) - for idx, frame in tqdm( - enumerate(reader), total=frame_cnt, desc="■ fan ", disable=not verbose - ) - } - fi = {idx: to_full(*info, boxes[idx][:2]) for idx, info in fi.items()} - - df = pd.DataFrame(fi.values(), columns=["box", "pts2d", "pts3d"]) - df["frame_idx"] = list(fi.keys()) - callback2(100) - return df - - -def save_crop_info_move( - anchor_box_path, - mp4_path, - out_dir, - image_size, - make_mp4=False, - crop_offset_y=-0.1, - crop_margin=0.4, - callback=None, - verbose=False, -): - callback1 = callback_inter( - callback, min_per=0, max_per=5, desc="save_crop_info_move - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=5, max_per=70, desc="save_crop_info_move - 2", verbose=verbose - ) - callback3 = callback_inter( - callback, - min_per=70, - max_per=100, - desc="save_crop_info_move - 3", - verbose=verbose, - ) - - df_anchor_i = pd.read_pickle(anchor_box_path) - boxes = get_anchor_boxes_move(mp4_path, df_anchor_i, offset_y=0.0, margin=1.5) - - clip_dir = Path(out_dir) / Path(anchor_box_path).stem - Path(clip_dir).mkdir(exist_ok=True, parents=True) - - pickle_path = f"{clip_dir}/df_fan.pickle" - if Path(pickle_path).exists(): - return pickle_path - - callback1(100) - # FAN 이 얼굴과 피처 포인트를 구한다. - df = df_fan_info_move(mp4_path, boxes, callback2, verbose=verbose) - # 모델에 입력할 박스를 다시 구해서 crop 한다. - # crop 박스 영역은 피쳐 포인트 기반으로 구한다. 
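`compute_max_size` and `get_anchor_boxes_move` above give every frame a square box of one shared side length (the largest detected face plus margin), centred on that frame's detection, so a moving face stays inside a constant-size crop. A simplified sketch under that reading; the original additionally re-squares the box after rounding, which is omitted here, and the input boxes are invented:

```python
import numpy as np

def compute_max_size(boxes, margin=0.9):
    """Largest face side across all frames, grown by `margin`."""
    max_size = 0
    for box in boxes:
        if box is None:
            continue
        x1, y1, x2, y2 = box
        side = max(x2 - x1 + 1, y2 - y1 + 1)
        max_size = max(max_size, side * (1 + margin))
    return max_size

def anchor_boxes_move(boxes, frame_w, frame_h, offset_y=0.0, margin=1.5):
    """One fixed-size square box per frame, centred on that frame's detection."""
    size = compute_max_size(boxes, margin)
    out = []
    for x1, y1, x2, y2 in boxes:
        cx, cy = (x1 + x2) / 2, (y1 + y2) / 2 + offset_y * size
        bx1, by1 = cx - size / 2, cy - size / 2
        bx2, by2 = cx + size / 2, cy + size / 2
        bx1, by1, bx2, by2 = np.array([bx1, by1, bx2, by2]).round().astype(np.int32)
        # Clamp to the frame; every box shares the same side length, so crops can be batched.
        out.append([max(0, bx1), max(0, by1), min(bx2, frame_w), min(by2, frame_h)])
    return out

print(anchor_boxes_move([[100, 120, 180, 210], [110, 125, 185, 215]], 1280, 720))
```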
- df = crop_and_save_move( - image_size, - mp4_path, - df, - offset_y=crop_offset_y, - margin=crop_margin, - clip_dir=clip_dir, - callback=callback3, - verbose=verbose, - ) - if df is None: - return None - df.to_pickle(pickle_path) - with open(pickle_path.replace(".pickle", ".txt"), "w") as f: - f.write("success") - - try: - min_idx = min(df["frame_idx"].values) - max_idx = max(df["frame_idx"].values) - save_audio(mp4_path, f"{clip_dir}/audio.wav") - save_debug_audio(mp4_path, min_idx, max_idx, f"{clip_dir}/audio_debug.wav") - except: - # inference 때는 음성 없는 비디오가 들어온다. - pass - - if make_mp4: - meta = ff.video_meta(mp4_path) - debug_clip_path = save_debug_clip(clip_dir, meta["fps"]) - print("saved debug_mp4:", debug_clip_path) - - return pickle_path diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/face_finder.py b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/face_finder.py deleted file mode 100644 index 1008a8010195dc1162851cc83cd9571a87674e9a..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/face_finder.py +++ /dev/null @@ -1,570 +0,0 @@ -import gc -import os -from pathlib import Path - -import cv2 -import imageio -import imageio_ffmpeg -import numpy as np -import pandas as pd -import torch -import torchvision -from facenet_pytorch import MTCNN, InceptionResnetV1 -from moviepy.editor import AudioFileClip, ImageSequenceClip -from PIL import Image -from tqdm.auto import tqdm - -from stf_alternative.util import callback_inter - -g_mtcnn = None -g_recognizer = None -g_device = None - - -# 얼굴 인식 툴킷 -def init_face_finder(device="cuda:0"): - global g_mtcnn - global g_recognizer - global g_device - - if g_mtcnn is None and g_recognizer is None: - g_mtcnn = MTCNN(image_size=166, device=device) - print("load MTCNN ", "success ^ ^" if g_mtcnn is not None else "fail ㅠㅠ") - g_recognizer = InceptionResnetV1(pretrained="vggface2").eval().to(device) - print( - "load g_recognizer ", - "success ^ ^" if g_recognizer is not None else "fail ㅠㅠ", - ) - g_device = device - - -def del_face_finder(): - global g_mtcnn - global g_recognizer - global g_device - if g_mtcnn is not None: - del g_mtcnn - g_mtcnn = None - if g_recognizer is not None: - del g_recognizer - g_recognizer = None - torch.cuda.empty_cache() - - -def find_face(img): - """얼굴 위치 및 임베딩 벡터 구하기 - Arguments: - img: torch.Tensor 또는 ndarray. 분석하고자 하는 사진 - 동작: - img 사진에 등장하는 모든 얼굴을 찾아서 embedding값을 구한다 - 얼굴 영역 box와 embeddig 값을 pandas.DataFrame 형태로 변환한다 - df와 df의 정보값에 대응되는 crop 영역도 함께 리턴한다. - """ - global g_mtcnn - global g_recognizer - - # snow : init_face_finder 을 미리 불러주지 않았으면 여기서 불리도록한다. 
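    # (English gloss, added: the docstring above says find_face() takes an image (a torch.Tensor,
    #  ndarray or file path), finds every face in it, computes an embedding for each, and returns
    #  a pandas.DataFrame of boxes and embeddings together with the corresponding face crops.
    #  The "snow" note above says that if init_face_finder() was not called explicitly beforehand,
    #  it is invoked lazily here so the global MTCNN / InceptionResnetV1 models exist.)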
- init_face_finder() - - if isinstance(img, str): - img = imageio.imread(img) - frame = np.array(img) - df_non_face = pd.DataFrame({"box": [np.nan], "ebd": [np.nan]}) - with torch.no_grad(): - boxes = g_mtcnn.detect(frame) - if boxes[0] is None: - return df_non_face, None - boxes = boxes[0].round().astype(np.int32) - - org = np.array(frame) - - def calc_ebd(box): - x1, y1, x2, y2 = box - crop = org[y1 : y2 + 1, x1 : x2 + 1] - sz = g_mtcnn.image_size - resized = cv2.resize(crop, (sz, sz), cv2.INTER_AREA) - x = torchvision.transforms.functional.to_tensor(resized) - with torch.no_grad(): - ebd = g_recognizer(x.unsqueeze(0).to(g_device)) - return ebd[0].cpu(), crop - - def check_box(x1, y1, x2, y2): - return (0 <= x1 and 0 <= y1) and (x2 < frame.shape[1] and y2 < frame.shape[0]) - - boxes = [box.tolist() for box in boxes if check_box(*box)] - ebds = [calc_ebd(box) for box in boxes] - if len(ebds) == 0: - return df_non_face, None - ebds, face_images = list(zip(*ebds)) - df_face = pd.DataFrame({"box": list(boxes), "ebd": ebds}) - return df_face, face_images - - -class FaceFinder: - def __init__(self, device="cuda:0"): - self.mtcnn = MTCNN(image_size=166, device=device) - self.recognizer = InceptionResnetV1(pretrained="vggface2").eval().to(device) - self.device = device - self.tracker = cv2.TrackerCSRT_create() # cv2.legacy.TrackerMOSSE_create() - self.tracker_initialized = False - # self.last_successful_box = None - self.idx = 0 - - def calc_ebd(self, box, frame): - x1, y1, x2, y2 = box - crop = frame[y1 : y2 + 1, x1 : x2 + 1] - sz = self.mtcnn.image_size - resized = cv2.resize(crop, (sz, sz), cv2.INTER_AREA) - x = torchvision.transforms.functional.to_tensor(resized) - with torch.no_grad(): - ebd = self.recognizer(x.unsqueeze(0).to(g_device)) - return ebd[0].cpu() - - def find_face(self, frame): - if not self.tracker_initialized: - # Run face detection and initialize tracker - boxes = self.mtcnn.detect(frame) - if boxes[0] is not None: - box = boxes[0][0].round().astype(np.int32) - self.tracker_initialized = True - self.tracker.init( - frame, (box[0], box[1], box[2] - box[0], box[3] - box[1]) - ) - ebd = self.calc_ebd(box, frame) - return {"box": box.tolist(), "ebd": ebd.numpy()} - else: - # Update tracker and get new bounding box - (success, bbox) = self.tracker.update(frame) - if success: - x, y, w, h = [int(v) for v in bbox] - box = [x, y, x + w, y + h] - - ebd = self.calc_ebd(box, frame) - return {"box": box, "ebd": ebd.numpy()} - else: - # Tracker failed, reset initialization - self.tracker_initialized = False - - # if self.last_successful_box is not None: - boxes = self.mtcnn.detect(frame) - if boxes[0] is not None: - box = boxes[0][0].round().astype(np.int32) - ebd = self.calc_ebd(box, frame) - return {"box": box, "ebd": ebd.numpy()} - return None - - -""" 주어진 비디오에서 얼굴을 찾아 아나운서 얼굴과 유사도 구해 놓기 """ - - -# 비디오에서 추출 랜던 가능한 프레임 범위중 end 부분 알아내기 -def get_valid_end(path, end=None, stride=1): - vid = imageio.get_reader(path, "ffmpeg") - - if end is None: - end = vid.count_frames() - elif end < 0: - end = vid.count_frames() + 1 + end - - if stride == 1: - return end - - try: - vid.get_data(end - 1) - vid.close() - return end - except: - end = end - 1 - vid.close() - return get_valid_end(path, end, stride) - - -def extract_frame(path, start=0, end=-1, stride=1, verbose=False): - val_end = get_valid_end(path, end, stride) - - vid = imageio.get_reader(path, "ffmpeg") - if end < 0: - end = val_end + 1 + end - if val_end < end: - end = val_end - - frames = {} - for i in tqdm( - range(start, end, stride), 
- desc=f"extract frame stride({stride}) {Path(path).name}", - disable=not verbose, - ): - try: - f = vid.get_data(i) - except: - w, h = vid.get_meta_data()["size"] - f = np.zeros((h, w, 3), np.uint8) - frames[i] = f - - vid.close() - return frames - - -# 비디오에 나오는 얼굴 임베딩값 구하는 유틸 -def calc_ebds_from_images(frames, verbose=False): - face_infos = { - idx: find_face(frame)[0] - for idx, frame in tqdm( - frames.items(), desc="find_faces for calc_ebd", disable=not verbose - ) - } - for idx, fi in face_infos.items(): - fi["frame_idx"] = idx - return pd.concat(face_infos, ignore_index=True) - - -# 유사도 구하는 유틸 - - -# 얼굴 박스 그려서 보여주기. 다른 사람 얼굴은 붉은색, 아나운서 얼굴은 녹색 -def draw_face(df, frame): - frame = frame.copy() - - boxes = df["box"].values - if 1 < len(boxes): - for x1, y1, x2, y2 in boxes[:-1]: - frame = cv2.rectangle(frame, (x1, y1), (x2, y2), (255, 0, 0), 3) - if 0 < len(boxes): - x1, y1, x2, y2 = boxes[-1] - frame = cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 3) - return frame - - -def show_face(frame_idx, df_face_info, frames): - df = df_face_info.query("frame_idx == @frame_idx") - frame = draw_face(df, frames[frame_idx]) - display(Image.fromarray(frame)) - - -def get_filtered_face(df_face_info, sim_th=0.7): - # 아나운서 얼굴만 나오는 정사각형 영역 구하기 - tqdm.pandas() - - # 유사도 기반으로 아나운서 얼굴만 필터(실제로는 먼저 가장 유사한 얼굴만 골라내기) - df = df_face_info.groupby("frame_idx", as_index=False).apply( - lambda df: df.iloc[-1:] - ) - df = df.drop("ebd", axis=1) - df["area"] = df["box"].map(lambda x: (x[2] - x[0] + 1) * (x[3] - x[1] + 1)) - df = df.query("@sim_th <= similaraty") - return df - - -def get_face_info_(frames, ebd_아나운서, sim_th, verbose=False): - df_face_info = calc_ebds_from_images(frames, verbose=verbose) - df_face_info = df_face_info.dropna(axis=0) - - calc_sim = lambda ebd: (ebd_아나운서 * ebd).sum().item() - df_face_info["similaraty"] = df_face_info["ebd"].map(calc_sim) - df_face_info = df_face_info.sort_values(["frame_idx", "similaraty"]) - - # 유사도 기반으로 아나운서 얼굴만 필터(실제로는 먼저 가장 유사한 얼굴만 골라내기) - return frames, df_face_info, get_filtered_face(df_face_info, sim_th) - - -def get_face_info(path, ebd_아나운서, start=0, end=-1, stride=1, sim_th=0.7, verbose=False): - frames = extract_frame(path, start, end, stride, verbose=verbose) - return get_face_info_(frames, ebd_아나운서, sim_th, verbose=verbose) - - -def get_face_idxs(mp4_path, meta): - STEP_SECONDS = 1 - S = STEP_SECONDS - - pickle_path = f"df_face_info/{Path(mp4_path).stem}.pickle" - df_face_info = pd.read_pickle(pickle_path) - df_f = get_filtered_face(df_face_info, 0.7) - - idxs = sorted(df_f["frame_idx"].tolist()) - - fps = meta["fps"] - - start_idxs = [max(int(idxs[0] - S * fps + 1), 0)] - end_idxs = [] - - prev_idx = start_idxs[-1] - for idx in idxs: - if prev_idx + fps * 10 < idx: - end_idxs.append(int(prev_idx + fps * S - 1)) - start_idxs.append(int(idx - fps * S + 1)) - prev_idx = idx - end_idxs.append(get_valid_end(mp4_path)) - - return list(zip(start_idxs, end_idxs)) - - -def split(mp4_path, ebd_아나운서, start, end, audioclip, meta): - frames_i, df_face_info_i, df_f_i = get_face_info( - mp4_path, ebd_아나운서, start, end, sim_th=0.7 - ) - - idxs = df_f_i["frame_idx"] - start, end = idxs.min(), idxs.max() - - frames_i = {i: f for i, f in frames_i.items() if start <= i and i <= end} - - s, e = start / meta["nframes"], end / meta["nframes"] - - if audioclip is not None: - t = audioclip.duration - a = audioclip.subclip(t_start=t * s, t_end=t * e) - c = ImageSequenceClip(list(frames_i.values()), fps=meta["fps"]) - - c = c.set_audio(a) - else: - c = None - - return c, 
df_face_info_i, df_f_i - - -def save_splited_face_info(mp4_path, ebd_아나운서, save_clip=False): - meta = video_meta(mp4_path) - - audioclip = AudioFileClip(mp4_path) if save_clip else None - - out_paths = [] - for i, (s, e) in enumerate(get_face_idxs(mp4_path, meta)): - c = extract_frame(mp4_path, s, e) - s, e = np.array(list(c.keys()))[[0, -1]] - e += 1 - clip, df_face_info_i, df_f_i = split(mp4_path, ebd_아나운서, s, e, audioclip, meta) - # df_face_info_i.to_pickle(f'df_face_info_i/{Path(mp4_path).stem}_{i:03d}.pickle') - out_path = f"df_anchor_i/{Path(mp4_path).stem}_{i:03d}.pickle" - os.makedirs(os.path.dirname(out_path), exist_ok=True) - df_f_i.to_pickle(out_path) - out_paths.append(out_path) - if save_clip: - video_name = f"clip/{Path(mp4_path).stem}_{i:03d}.mp4" - os.makedirs(os.path.dirname(video_name), exist_ok=True) - clip.write_videofile(video_name) - return out_paths - - -def save_face_info(mp4_path, ebd_아나운서, base="./df_face_info"): - pickle_path = f"{base}/{Path(mp4_path).stem}.pickle" - - if not Path(pickle_path).exists(): - fps = video_meta(mp4_path)["fps"] - r = get_face_info(mp4_path, ebd_아나운서, 0, -1, stride=(round(fps) * 1)) - frames, df_face_info, df_아나운서_only = r - - os.makedirs(os.path.dirname(pickle_path), exist_ok=True) - df_face_info.to_pickle(pickle_path) - - return save_splited_face_info(mp4_path, ebd_아나운서) - - -def face_info_to_anchor(df, stride, val_end=None): - if val_end is None: - last_idx = df["frame_idx"].max() - val_end = last_idx - rows = [] - for idx in range(val_end + 1): - target_idx = idx // stride * stride - df_search = df.query("frame_idx == @target_idx") - assert len(df_search) > 0 - box, _, _, sim = df_search.iloc[0].values - x1, y1, x2, y2 = box - rows.append([box, idx, sim, (x2 - x1) * (y2 - y1)]) - - df_face_info = pd.DataFrame( - rows, columns=["box", "frame_idx", "sililaraty", "area"] - ) - # df_face_info.head() - return df_face_info - - -def save_face_info2(mp4_path, ebd_아나운서, base="./", verbose=False): - df_face_info_path = os.path.join( - base, "df_face_info", f"{str(Path(mp4_path).stem)}.pickle" - ) - if verbose: - print("save_face_info2 - df_face_info: ", str(df_face_info_path)) - - fps = video_meta(mp4_path)["fps"] - stride = round(fps) * 1 - - if not Path(df_face_info_path).exists(): - r = get_face_info(mp4_path, ebd_아나운서, 0, -1, stride=stride, verbose=verbose) - frames, df_face_info, df_아나운서_only = r - del frames - gc.collect() - os.makedirs(os.path.dirname(df_face_info_path), exist_ok=True) - df_face_info.to_pickle(df_face_info_path) - - dst = Path(base) / "df_anchor_i" / f"{Path(df_face_info_path).stem}_000.pickle" - if verbose: - print("df_anchor_i:", str(dst)) - if not Path(dst).exists(): - os.makedirs(os.path.dirname(dst), exist_ok=True) - df = pd.read_pickle(df_face_info_path) - df_ = df.sort_values("similaraty", ascending=False).drop_duplicates( - ["frame_idx"] - ) - df_ = df_.query("similaraty >= 0.3") - # display(df_.groupby('frame_idx').count()) - # pdb.set_trace() - df_face_info = face_info_to_anchor(df_, stride=stride, val_end=None) - df_face_info.to_pickle(dst) - return [dst] - return [dst] - - -# 메타데이터 추출 유틸 -def video_meta(file): - vid = imageio.get_reader(file, "ffmpeg") - meta = vid.get_meta_data() - meta["path"] = file - meta["nframes"] = vid.count_frames() - vid.close() - return meta - - -# 비디오에 나오는 얼굴 임베딩값 구하는 유틸 -def calc_ebds_from_images2(path, stride, callback=None, verbose=False): - if verbose: - print("calc_ebds_from_images2, ", path) - - def __find_face(f, size): - f = np.frombuffer(f, dtype=np.uint8) - f = 
f.reshape(size[1], size[0], 3) - return find_face(f)[0] - - reader = imageio_ffmpeg.read_frames(path) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - size = meta["size"] - - frame_cnt, _ = imageio_ffmpeg.count_frames_and_secs(path) - face_infos = {} - for idx, frame in tqdm( - enumerate(reader), - total=frame_cnt, - desc="find_faces for calc_ebd", - disable=not verbose, - ): - # 진행상황을 알려준다. - callback((idx + 1) / frame_cnt * 100) - - if idx % stride != 0: - continue - face_infos[idx] = __find_face(frame, size) - - for idx, fi in face_infos.items(): - fi["frame_idx"] = idx - return pd.concat(face_infos, ignore_index=True) - - -def calc_ebds_from_images_move(path, stride, callback=None, verbose=False): - if verbose: - print("calc_ebds_from_images_move, ", path) - - # Initialize the FaceFinder - face_finder = FaceFinder(device="cuda:0") - - reader = imageio_ffmpeg.read_frames(path) - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - size = meta["size"] - - frame_cnt, _ = imageio_ffmpeg.count_frames_and_secs(path) - face_infos = {} - - for idx, frame in tqdm( - enumerate(reader), - total=frame_cnt, - desc="find_faces for calc_ebd", - disable=not verbose, - ): - # Indicate progress - if callback: - callback((idx + 1) / frame_cnt * 100) - - # Only process frames according to the stride - if idx % stride != 0: - continue - - # Call the new find_face method instead - frame = np.frombuffer(frame, dtype=np.uint8) - frame = frame.reshape(size[1], size[0], 3) - result = face_finder.find_face(frame) - - if result is not None: - face_infos[idx] = result - - # Build DataFrame - records = [] - for idx, info in face_infos.items(): - info["frame_idx"] = idx - records.append(info) - return pd.DataFrame(records) - - -# get_face_info 와 기능은 동일하지만, 메모리 사용을 줄인 버전 -def get_face_info2(path, ebd_아나운서, stride=1, sim_th=0.7, callback=None, verbose=False): - if verbose: - print("get_face_info2") - # if stride==1: - # df_face_info = calc_ebds_from_images_move(path, stride=stride, callback=callback, verbose=verbose) - # else: - df_face_info = calc_ebds_from_images2( - path, stride=stride, callback=callback, verbose=verbose - ) - df_face_info = df_face_info.dropna(axis=0) - - calc_sim = lambda ebd: (ebd_아나운서 * ebd).sum().item() - df_face_info["similaraty"] = df_face_info["ebd"].map(calc_sim) - df_face_info = df_face_info.sort_values(["frame_idx", "similaraty"]) - - # 유사도 기반으로 아나운서 얼굴만 필터(실제로는 먼저 가장 유사한 얼굴만 골라내기) - return df_face_info, get_filtered_face(df_face_info, sim_th) - - -# save_face_info2 와 기능은 동일하나, -# 메모리 적게 사용하도록 개선한 버전 -def save_face_info3(mp4_path, ebd_아나운서, move, base="./", callback=None, verbose=False): - df_face_info_path = os.path.join( - base, "df_face_info", f"{str(Path(mp4_path).stem)}.pickle" - ) - if verbose: - print("save_face_info3 - df_face_info: ", str(df_face_info_path)) - callback1 = callback_inter( - callback, min_per=0, max_per=90, desc="save_face_info3 - 1", verbose=verbose - ) - callback2 = callback_inter( - callback, min_per=90, max_per=100, desc="save_face_info3 - 2", verbose=verbose - ) - - fps = video_meta(mp4_path)["fps"] - if move: - stride = 1 - else: - stride = round(fps) * 1 - if not Path(df_face_info_path).exists(): - r = get_face_info2( - mp4_path, ebd_아나운서, stride=stride, callback=callback1, verbose=verbose - ) - df_face_info, df_아나운서_only = r - os.makedirs(os.path.dirname(df_face_info_path), exist_ok=True) - df_face_info.to_pickle(df_face_info_path) - - dst = Path(base) / "df_anchor_i" / 
f"{Path(df_face_info_path).stem}_000.pickle" - if verbose: - print("df_anchor_i:", str(dst)) - if not Path(dst).exists(): - os.makedirs(os.path.dirname(dst), exist_ok=True) - df = pd.read_pickle(df_face_info_path) - df_ = df.sort_values("similaraty", ascending=False).drop_duplicates( - ["frame_idx"] - ) - df_ = df_.query("similaraty >= 0.3") - # display(df_.groupby('frame_idx').count()) - # pdb.set_trace() - df_face_info = face_info_to_anchor(df_, stride=stride, val_end=None) - df_face_info.to_pickle(dst) - return [dst] - callback2(100) - return [dst] diff --git a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/make_mels.py b/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/make_mels.py deleted file mode 100644 index 59f38feb5220003ac76189a28fac95660308e57b..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/preprocess_dir/utils/make_mels.py +++ /dev/null @@ -1,99 +0,0 @@ -import librosa -import librosa.filters -import numpy as np -from addict import Dict -from scipy import signal - -# Default hyperparameters -hp = Dict( - num_mels=96, # Number of mel-spectrogram channels and local conditioning dimensionality - n_fft=800, # Extra window size is filled with 0 paddings to match this parameter - hop_size=200, # For 16000Hz, 200 = 12.5 ms (0.0125 * sample_rate) - win_size=800, # For 16000Hz, 800 = 50 ms (If None, win_size = n_fft) (0.05 * sample_rate) - sample_rate=16000, # 16000Hz (corresponding to librispeech) (sox --i ) - # Contribution by @begeekmyfriend - # Spectrogram Pre-Emphasis (Lfilter: Reduce spectrogram noise and helps model certitude - # levels. Also allows for better G&L phase reconstruction) - preemphasis=0.97, # filter coefficient. - # for normalization - max_abs_value=4.0, # max absolute value of data. - # If symmetric, data will be [-max, max] else [0, max] - # (Must not be too big to avoid gradient explosion, - # not too small for fast convergence) - # Limits - min_level_db=-100, - ref_level_db=20, - fmin=55, # Set this to 55 if your speaker is male! if female, 95 should help taking off noise. - # (To test depending on dataset. Pitch info: male~[65, 260], female~[100, 525]) - fmax=7600, # To be increased/reduced depending on data. 
-) - - -def load_wav(path, sr=hp.sample_rate): - return librosa.core.load(path, sr=sr)[0] - - -def melspectrogram(wav): - D = _stft(preemphasis(wav, hp.preemphasis)) - S = _amp_to_db(_linear_to_mel(np.abs(D))) - hp.ref_level_db - return _normalize(S) - - -def _stft(y): - return librosa.stft( - y=y, n_fft=hp.n_fft, hop_length=hp.hop_size, win_length=hp.win_size - ) - - -def _amp_to_db(x): - min_level = np.exp(hp.min_level_db / 20 * np.log(10)) - return 20 * np.log10(np.maximum(min_level, x)) - - -def preemphasis(wav, k): - return signal.lfilter([1, -k], [1], wav) - - -def _normalize(S): - return np.clip( - (2 * hp.max_abs_value) * ((S - hp.min_level_db) / (-hp.min_level_db)) - - hp.max_abs_value, - -hp.max_abs_value, - hp.max_abs_value, - ) - - -def _build_mel_basis(): - assert hp.fmax <= hp.sample_rate // 2 - return librosa.filters.mel( - hp.sample_rate, hp.n_fft, n_mels=hp.num_mels, fmin=hp.fmin, fmax=hp.fmax - ) - - -def _linear_to_mel(spectogram): - return np.dot(_mel_basis, spectogram) - - -_mel_basis = _build_mel_basis() - - -################################################################# -from pathlib import Path - - -def load_wav_to_mels(wav_path, wav_loaded=None): - assert wav_path is not None or wav_loaded is not None - if wav_path is not None: - wav = load_wav(wav_path) - else: - wav = wav_loaded - spec = melspectrogram(wav) - return spec - - -def save_mels(wav_path): - spec = load_wav_to_mels(wav_path) - mels_path = Path(wav_path).parent / "mels" - - np.savez_compressed(mels_path, spec=spec) - return str(mels_path) + ".npz" diff --git a/stf/stf-api-alternative/src/stf_alternative/readers.py b/stf/stf-api-alternative/src/stf_alternative/readers.py deleted file mode 100644 index c73dee8602ce3fbdcb618c386860ea8210934882..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/readers.py +++ /dev/null @@ -1,182 +0,0 @@ -import asyncio -import math -from collections import deque -from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor -from glob import glob -from pathlib import Path - -import av -import numpy as np -from PIL import Image -from torch.utils.data import Dataset, default_collate - - -def get_default_video_reader( - data_path, -): - with av.open(str(data_path)) as container: - for frame in container.decode(video=0): - yield frame.to_ndarray( - format="rgb" if data_path.suffix == ".mp4" else "rgba" - ) - - -accepted_format = set([".webp", ".png", ".jpg"]) - - -def read_image(path): - return np.array(Image.open(path).convert("RGBA")) - - -class ImageDataset(Dataset): - def __init__(self, path, num_skip_frames=0): - paths = sorted( - [it for it in glob(f"{path}/*") if Path(it).suffix in accepted_format] - ) - self.paths = paths[num_skip_frames:] + paths[:num_skip_frames] - - def __getitem__(self, idx): - return read_image(self.paths[idx]) - - def __len__(self): - return len(self.paths) - - -class ProcessPoolIterator: - def __init__(self, dataset, preload=8, num_workers=2): - self.pool = ProcessPoolExecutor(num_workers) - self.dataset = dataset - self.queue = deque() - self.preload = preload - - def __iter__(self): - for i in range(min(self.preload, len(self.dataset))): - self.queue.append(self.pool.submit(self.dataset.__getitem__, i)) - - for i in range(self.preload, len(self.dataset)): - self.queue.append(self.pool.submit(self.dataset.__getitem__, i)) - yield self.queue.popleft().result() - - while len(self.queue): - yield self.queue.popleft().result() - - def __len__(self): - return len(self.dataset) - - -class 
ProcessPoolBatchIterator: - def __init__(self, dataset, batch_size, num_workers=4, drop_last=False): - self.iterator = ProcessPoolIterator( - dataset=dataset, preload=batch_size, num_workers=num_workers - ) - self.batch_size = batch_size - self.drop_last = drop_last - - def __iter__(self): - iterator = iter(self.iterator) - while True: - ret = [] - try: - for i in range(self.batch_size): - ret.append(next(iterator)) - yield default_collate(ret) - except StopIteration as e: - if not self.drop_last and ret: - yield default_collate(ret) - break - - def __len__(self): - return ( - math.floor(len(self.iterator) / self.batch_size) - if self.drop_last - else math.ceil(len(self.iterator) / self.batch_size) - ) - - -class AsyncProcessPoolIterator: - def __init__(self, dataset, preload=8, num_workers=4): - self.pool = ProcessPoolExecutor(num_workers) - self.dataset = dataset - self.queue = deque() - self.preload = preload - - async def __aiter__(self): - loop = asyncio.get_running_loop() - - for i in range(min(self.preload, len(self.dataset))): - self.queue.append( - loop.run_in_executor(self.pool, self.dataset.__getitem__, i) - ) - - for i in range(self.preload, len(self.dataset)): - self.queue.append( - loop.run_in_executor(self.pool, self.dataset.__getitem__, i) - ) - yield await self.queue.popleft() - - while len(self.queue): - yield await self.queue.popleft() - - def __len__(self): - return len(self.dataset) - - -class AsyncProcessPoolBatchIterator: - def __init__(self, dataset, batch_size, num_workers=4, drop_last=False): - self.iterator = AsyncProcessPoolIterator( - dataset=dataset, preload=batch_size, num_workers=num_workers - ) - self.batch_size = batch_size - self.drop_last = drop_last - - async def __aiter__(self): - iterator = aiter(self.iterator) - while True: - ret = [] - try: - for _ in range(self.batch_size): - ret.append(await anext(iterator)) - yield default_collate(ret) - except StopAsyncIteration as e: - if not self.drop_last and ret: - yield default_collate(ret) - break - - def __len__(self): - return ( - math.floor(len(self.iterator) / self.batch_size) - if self.drop_last - else math.ceil(len(self.iterator) / self.batch_size) - ) - - -def get_image_folder_process_reader( - data_path, - num_skip_frames=0, - num_workers=4, - preload=16, -): - dataset = ImageDataset(path=data_path, num_skip_frames=num_skip_frames) - dataloader = ProcessPoolIterator( - dataset=dataset, - num_workers=num_workers, - preload=preload, - ) - - return dataloader - - -def get_image_folder_async_process_reader( - data_path, - num_skip_frames=0, - num_workers=4, - preload=16, -): - dataset = ImageDataset(path=data_path, num_skip_frames=num_skip_frames) - dataloader = AsyncProcessPoolIterator( - dataset=dataset, - num_workers=num_workers, - preload=preload, - ) - - return dataloader diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/__init__.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/__pycache__/__init__.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index aa6ac17f07b84672068fbcf1d70641cbdd4854dd..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git 
a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/.ipynb_checkpoints/autoencoder-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/.ipynb_checkpoints/autoencoder-checkpoint.py deleted file mode 100644 index 99279c110d614ea1fc9c71f82551f39797044621..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/.ipynb_checkpoints/autoencoder-checkpoint.py +++ /dev/null @@ -1,433 +0,0 @@ -import torch -from torch import nn - -# from .conv import (Conv2d, Conv2dTranspose, ResidualConv2d) -from .attentions import * - -bias = True -inplace = False - - -class Conv1d(nn.Module): - def __init__(self, cin, cout, kernel_size, stride, padding, act=True): - super().__init__() - self.block = nn.Sequential( - nn.Conv1d( - cin, - cout, - kernel_size=kernel_size, - stride=stride, - padding=padding, - bias=bias, - ), - nn.BatchNorm1d(cout, momentum=0.8, eps=0.001), - ) - self.relu = nn.ReLU(inplace=inplace) if act else None - - def forward(self, x): - x1 = self.block(x) - if self.relu: - return self.relu(x1) - else: - return x1 - - -class Conv2d(nn.Module): - def __init__(self, cin, cout, kernel_size, stride, padding, act=True): - super().__init__() - self.block = nn.Sequential( - nn.Conv2d( - cin, - cout, - kernel_size=kernel_size, - stride=stride, - padding=padding, - bias=bias, - ), - nn.BatchNorm2d(cout, momentum=0.8, eps=0.001), - ) - self.relu = nn.ReLU(inplace=inplace) if act else None - - def forward(self, x): - x1 = self.block(x) - if self.relu: - return self.relu(x1) - else: - return x1 - - -class Conv2dTranspose(nn.Module): - def __init__( - self, cin, cout, kernel_size, stride, padding, output_padding=0, act=True - ): - super().__init__() - self.conv_block = nn.Sequential( - nn.ConvTranspose2d( - cin, cout, kernel_size, stride, padding, output_padding, bias=bias - ), - nn.BatchNorm2d(cout, momentum=0.8, eps=0.001), - ) - self.relu = nn.ReLU(inplace=inplace) if act else None - - def forward(self, x): - x1 = self.conv_block(x) - if self.relu: - return self.relu(x1) - else: - return x1 - - -class Residual_1d(nn.Module): - def __init__(self, c): - super().__init__() - self.conv1 = Conv1d(c, c, 3, 1, 1) - self.conv2 = Conv1d(c, c, 3, 1, 1) - self.relu = nn.ReLU(inplace=inplace) - - def forward(self, x): - x1 = self.conv1(x) - x2 = self.conv2(x1) - x3 = x2 + x - x3 = self.relu(x3) - return x3 - - -class Residual(nn.Module): - def __init__(self, c): - super().__init__() - self.conv1 = Conv2d(c, c, 3, 1, 1) - self.conv2 = Conv2d(c, c, 3, 1, 1) - self.relu = nn.ReLU(inplace=inplace) - - def forward(self, x): - x1 = self.conv1(x) - x2 = self.conv2(x1) - x3 = x2 + x - x3 = self.relu(x3) - return x3 - - -class Encoder(nn.Module): - def __init__(self, shape): - super().__init__() - - c, h, w = shape - - self.id_map = Conv2d(c, 32, 7, 1, 3) - - self.conv1 = Conv2d(32, 64, 5, 2, 2) - self.residual11 = Residual(64) - self.residual12 = Residual(64) - - self.conv2 = Conv2d(64, 128, 3, 2, 1) - self.residual21 = Residual(128) - self.residual22 = Residual(128) - self.residual23 = Residual(128) - - self.conv3 = Conv2d(128, 256, 3, 2, 1) - self.residual31 = Residual(256) - self.residual32 = Residual(256) - - self.conv4 = Conv2d(256, 512, 3, 2, 1) - self.residual41 = Residual(512) - self.residual42 = Residual(512) - - self.conv5 = Conv2d(512, 512, 3, 2, 1) - - self.conv6 = Conv2d(512, 512, 3, 1, 0) - - kh, kw = ((h + 31) // 32 - 2), ((w + 31) // 32 - 2) - # kh, kw = 2 * (kh // 2) + 1, 2 * (kw // 2) + 1 - self.conv7 = Conv2d(512, 512, (kh, kw), 1, 0) 
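    # Added sanity note (hedged, not part of the original model code): for the 108x96 crops
    # suggested by the shape comments in forward() below, the five stride-2 convs reduce the
    # spatial size 108x96 -> 54x48 -> 27x24 -> 14x12 -> 7x6 -> 4x3, conv6 (kernel 3, padding 0)
    # gives 2x1, and conv7's kernel matches that exactly, so ft70 comes out 512x1x1:
    #   >>> h, w = 108, 96
    #   >>> ((h + 31) // 32 - 2, (w + 31) // 32 - 2)
    #   (2, 1)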
- - def forward(self, x): - id_map = self.id_map(x) # 32: 256, 108, 96 - - ft10 = self.conv1(id_map) # 64: 128, 54, 48 - ft11 = self.residual11(ft10) - ft12 = self.residual12(ft11) - - ft20 = self.conv2(ft12) # 128: 64, 27, 24 - ft21 = self.residual21(ft20) - ft22 = self.residual22(ft21) - ft23 = self.residual23(ft22) - - ft30 = self.conv3(ft23) # 256: 32, 14, 12 - ft31 = self.residual31(ft30) - ft32 = self.residual32(ft31) - - ft40 = self.conv4(ft32) # 512: 16, 7, 6 - ft41 = self.residual41(ft40) - ft42 = self.residual42(ft41) - - ft50 = self.conv5(ft42) # 512: 8, 4, 3 - ft60 = self.conv6(ft50) # 512: 6, 2, 1 - ft70 = self.conv7(ft60) # 512: 1, 1, 1 - - return [id_map, ft12, ft23, ft32, ft42, ft50, ft60, ft70] - - -class EncoderAudio(nn.Module): - def __init__(self, shape): - super().__init__() - - c, h, w = shape # 1, 96, 108 - - self.conv1 = Conv2d(c, 32, 3, 1, 1) - self.residual11 = Residual(32) - self.residual12 = Residual(32) - - self.conv2 = Conv2d(32, 64, 3, 3, 1) - self.residual21 = Residual(64) - self.residual22 = Residual(64) - - # sh, sw = (h + 26) // 27, (w + 26) // 27 - # self.conv3 = Conv2d(64, 128, (5, 5), (sh, sw), (sh//2, sw//2)) - # k = (w+26)//27 # w=108 => k=4 - self.conv3 = Conv2d(64, 128, 3, (3, 3), 1) - self.residual31 = Residual(128) - self.residual32 = Residual(128) - - self.conv4 = Conv2d(128, 256, 3, 3, 1) - self.residual41 = Residual(256) - self.residual42 = Residual(256) - - self.conv5 = Conv2d(256, 512, 4, 1, 0) - - self.conv6 = Conv2d(512, 512, 1, 1, 0) - - def forward(self, x): - ft10 = self.conv1(x) # 96x108 - ft11 = self.residual11(ft10) - ft12 = self.residual12(ft11) - - ft20 = self.conv2(ft12) # 32x36 - ft21 = self.residual21(ft20) - ft22 = self.residual22(ft21) - - ft30 = self.conv3(ft22) # 11x9 | 11x12 - ft31 = self.residual31(ft30) - ft32 = self.residual32(ft31) - - ft40 = self.conv4(ft32) # 4x3 | 4x4 - ft41 = self.residual41(ft40) - ft42 = self.residual42(ft41) - - ft50 = self.conv5(ft42) # 1x1 | 1, 1 - ft60 = self.conv6(ft50) # 1x1 - - return ft60 - - -class EncoderProcessedAudio(nn.Module): - def __init__(self, channel): - super().__init__() - - self.conv1 = Conv1d(channel, 512, 3, 1, 0) - self.residual11 = Residual_1d(512) - self.residual12 = Residual_1d(512) - - self.conv2 = Conv1d(512, 256, 3, 1, 0) - self.residual21 = Residual_1d(256) - self.residual22 = Residual_1d(256) - - self.conv3 = Conv1d(256, 128, 3, 1, 0) - self.residual31 = Residual_1d(128) - self.residual32 = Residual_1d(128) - - self.conv4 = Conv1d(128, 64, 3, 1, 0) - self.residual41 = Residual_1d(64) - self.residual42 = Residual_1d(64) - - self.conv5 = Conv1d(64, 32, 3, 2, 0) - self.residual51 = Residual_1d(32) - self.residual52 = Residual_1d(32) - - self.conv6 = Conv1d(32, 16, 3, 1, 0) - self.residual61 = Residual_1d(16) - self.residual62 = Residual_1d(16) - - def forward(self, x): - x = self.conv1(x.permute(0, 2, 1)) - x = self.residual11(x) - x = self.residual12(x) - - x = self.conv2(x) - x = self.residual21(x) - x = self.residual22(x) - - x = self.conv3(x) - x = self.residual31(x) - x = self.residual32(x) - - x = self.conv4(x) - x = self.residual41(x) - x = self.residual42(x) - - x = self.conv5(x) - x = self.residual51(x) - x = self.residual52(x) - - x = self.conv6(x) - x = self.residual61(x) - x = self.residual62(x) - - x = torch.flatten(x, start_dim=1).unsqueeze(dim=-1).unsqueeze(dim=-1) - - return x - - -class Decoder(nn.Module): - def __init__(self, shape): - super().__init__() - - c, h, w = shape - kh, kw = (h + 31) // 32, (w + 31) // 32 - self.convt1 = 
Conv2dTranspose(1024, 512, (kh, kw), (kh, kw), 0) - - self.convt2 = Conv2dTranspose(1024, 512, 3, 2, 1, 1) - self.residual21 = Residual(512) - self.residual22 = Residual(512) - - self.convt3 = Conv2dTranspose(1024, 256, 3, 2, 1, 1) - self.residual31 = Residual(256) - self.residual32 = Residual(256) - - self.convt4 = Conv2dTranspose(512, 128, 3, 2, 1, 1) - self.residual41 = Residual(128) - self.residual42 = Residual(128) - - self.convt5 = Conv2dTranspose(256, 64, 3, 2, 1, 1) - self.residual51 = Residual(64) - self.residual52 = Residual(64) - - self.convt6 = Conv2dTranspose(128, 32, 3, 2, 1, 1) - - self.conv7 = Conv2d(64, 16, 3, 1, 1) - self.conv8 = Conv2d(16, 16, 3, 1, 1) - self.conv9 = nn.Conv2d(16, 3, kernel_size=1, stride=1, padding=0) - self.sigmoid = nn.Sigmoid() - self.tanh = nn.Tanh() - - def forward(self, img_ft, audio_ft): - x = torch.cat([img_ft[-1], audio_ft], dim=1) # (B, 1024, 1, 1) - # 256, 96, - - x = self.convt1(x) # (B, 512: 8, 3) - - x = torch.cat([img_ft[5], x], dim=1) - - x = self.convt2(x) # (B, 512: 16, 6) - x = self.residual21(x) - x = self.residual22(x) - x = torch.cat([img_ft[4], x], dim=1) - - x = self.convt3(x) # (B, 256: 32, 12) - x = self.residual31(x) - x = self.residual32(x) - x = torch.cat([img_ft[3], x], dim=1) - - x = self.convt4(x) # (B, 128: 64, 24) - x = self.residual41(x) - x = self.residual42(x) - x = torch.cat([img_ft[2], x], dim=1) - - x = self.convt5(x) # (B, 64: 128, 48) - x = self.residual51(x) - x = self.residual52(x) - x = torch.cat([img_ft[1], x], dim=1) - - x = self.convt6(x) # (B, 32: 256, 96) - x = torch.cat([img_ft[0], x], dim=1) - x = self.conv7(x) # (B, 16: 256, 96) - x = self.conv8(x) # (B, 16: 256, 96) - - x = self.conv9(x) # (B, 3: 256, 96) - x = self.tanh(x) - - return x - - -class Speech2Face(nn.Module): - def __init__( - self, img_num, img_shape, audio_shape, model_type, infer=False, alpha=1.0 - ): - super().__init__() - - self.model_type = model_type - - if self.model_type == "stf_v3": - self.speech_encoder = EncoderProcessedAudio(1024) - else: - self.speech_encoder = EncoderAudio(audio_shape) - - c, h, w = img_shape - c = c * img_num - self.face_encoder = Encoder((c, h, w)) - self.face_decoder = Decoder(img_shape) - self.infer = infer - self.alpha = alpha - self.tanh = nn.Tanh() - self.relu = nn.ReLU() - - if self.model_type in ["aeltu", "stf_v2"]: - self.attention = LocationAwareAttention(512) - """ - elif self.model_type == "stf_v3": - self.audio_att = LocationAwareAttention(512) - self.image_att = LocationAwareAttention(512) - """ - - for m in self.modules(): - if isinstance(m, (nn.Conv1d, nn.Conv2d, nn.ConvTranspose2d)): - if bias == False: - nn.init.kaiming_normal_( - m.weight, mode="fan_out", nonlinearity="relu" - ) - else: - nn.init.xavier_uniform_(m.weight) - if m.bias is not None: - nn.init.constant_(m.bias, 0) - - elif isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.GroupNorm)): - nn.init.constant_(m.weight, 1) - nn.init.constant_(m.bias, 0) - - def forward(self, img, audio): - img_e = self.face_encoder(img) - audio_e = self.speech_encoder(audio) - - if self.model_type in ["ae", "stf_v1", "stf_v3"]: - img = self.face_decoder(img_e, audio_e) - elif self.model_type in ["aeltu", "stf_v2"]: - if audio_e.size(0) == 1: - att, _ = self.attention( - audio_e.squeeze().unsqueeze(dim=0).unsqueeze(dim=0), - audio_e.squeeze().unsqueeze(dim=0).unsqueeze(dim=0), - ) - att = att.unsqueeze(dim=-1).unsqueeze(dim=-1) - else: - att, _ = self.attention( - audio_e.squeeze().unsqueeze(dim=1), - audio_e.squeeze().unsqueeze(dim=1), - ) - att = 
att.squeeze().unsqueeze(dim=-1).unsqueeze(dim=-1) - img = self.face_decoder(img_e, self.tanh(att)) - """ - elif self.model_type == "stf_v3": - if audio_e.size(0) == 1: - a_att,_ = self.audio_att(audio_e.squeeze().unsqueeze(dim=0).unsqueeze(dim=0), audio_e.squeeze().unsqueeze(dim=0).unsqueeze(dim=0)) - a_att = a_att.unsqueeze(dim=-1).unsqueeze(dim=-1) - - i_att,_ = self.image_att(img_e[-1].squeeze().unsqueeze(dim=0).unsqueeze(dim=0), img_e[-1].squeeze().unsqueeze(dim=0).unsqueeze(dim=0)) - i_att = i_att.unsqueeze(dim=-1).unsqueeze(dim=-1) - else: - a_att,_ = self.audio_att(audio_e.squeeze().unsqueeze(dim=1), audio_e.squeeze().unsqueeze(dim=1)) - a_att = a_att.squeeze().unsqueeze(dim=-1).unsqueeze(dim=-1) - - i_att,_ = self.image_att(img_e[-1].squeeze().unsqueeze(dim=1), img_e[-1].squeeze().unsqueeze(dim=1)) - i_att = i_att.squeeze().unsqueeze(dim=-1).unsqueeze(dim=-1) - img_e[-1] = self.tanh(i_att) - img = self.face_decoder(img_e, self.tanh(a_att)) - """ - return img diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/.ipynb_checkpoints/datagen_aug-checkpoint.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/.ipynb_checkpoints/datagen_aug-checkpoint.py deleted file mode 100644 index 0d93ab42adf0623cc978cbfa33e5e3fb9225ae91..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/.ipynb_checkpoints/datagen_aug-checkpoint.py +++ /dev/null @@ -1,402 +0,0 @@ -import itertools -import os -import pdb -import random -import time -from glob import escape, glob -from pathlib import Path - -import cv2 -import numpy as np -import pandas as pd -import torch -from scipy.interpolate import interp1d -from torch.utils.data import DataLoader, Dataset - -from .mask_history import calc_poly -from .transform_history import mask_img_trsfs - -# snow : LipGanDS.__init__ 에서 계하도록 변경됨 -# half_window_size = 4 -# parameter로 받도록 수정 -# mel_step_size = 27 - - -def frame_id(fname): - return int(os.path.basename(fname).split("_")[0]) - - -def choose_ip_frame(frames, gt_frame, num_ips): - d = os.path.dirname(frames[0]) - return [os.path.join(d, gt_frame) for _ in range(num_ips)] - - -def get_audio_segment(center_frame, spec, mel_step_size, mel_ps, fps, half_window_size): - center_frame_id = frame_id(center_frame) - start_frame_id = center_frame_id - half_window_size - - # start_idx = int((80./25.) 
* start_frame_id) # 25 is fps of LRS2 - start_idx = int( - (float(mel_ps) / float(fps)) * start_frame_id - ) # mel, frame per sec 에 따라 계산 - if start_idx < 0: - spec = np.pad(spec, ((0, 0), (-start_idx, 0)), mode="edge") - start_idx = 0 - - end_idx = start_idx + mel_step_size - if spec.shape[1] < end_idx: - spec = np.pad(spec, ((0, 0), (0, end_idx - spec.shape[1])), mode="edge") - - # print('center_frame_id:', center_frame_id, ', mel [s,e]', start_idx, end_idx, ', mel shape:', spec.shape) - - return spec[:, start_idx:end_idx] - - -def get_processed_audio_segment(center_frame, processed_wav, fps, sample_rate): - center_frame_id = frame_id(center_frame) - - time_center = center_frame_id / fps - - center_idx = int(time_center * sample_rate) - center_idx = center_idx // 320 - start_idx = center_idx - 39 - - new_logits = processed_wav.copy() - if start_idx < 0: - new_logits = np.pad( - new_logits, ((-start_idx, 0), (0, 0)), mode="constant", constant_values=0 - ) - start_idx = 0 - - end_idx = start_idx + 39 * 2 - if len(new_logits) < end_idx: - new_logits = np.pad( - new_logits, - ((0, end_idx - len(new_logits)), (0, 0)), - mode="constant", - constant_values=0, - ) - - return new_logits[start_idx:end_idx, :] - - -def inter_alg(target_size, img): - if isinstance(target_size, tuple): - w, h = target_size - else: - w, h = target_size, target_size - return inter_alg_(w, h, img) - - -def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - -def resize_adapt(args, img): - sz = args.img_size - h, w = img.shape[:2] - if h == sz and w == sz: - return img - channel_size = img.shape[2] - board = np.full((sz, sz, channel_size), 128, np.uint8) - h, w = img.shape[:2] - if True: - # if sz < max(h, w): - r = sz / max(h, w) - h, w = int(round(r * h)), int(round(r * w)) - img = cv2.resize(img, (w, h), inter_alg(sz, img)) - board[(sz - h) // 2 : (sz - h) // 2 + h, (sz - w) // 2 : (sz - w) // 2 + w] = img - return board - - -def resize_adapt_pts(args, img, pts): - sz = args.img_size - h, w = img.shape[:2] - r = sz / max(h, w) - pts = pts * r - pts = np.round(np.array(pts)).astype(np.int32) - return pts - - -def masking(im, pts): - im = cv2.fillPoly(im, [pts], (128, 128, 128)) - return im - - -def smoothing_mask(pts): - pts = np.array(pts) - x = list(pts[:, 0]) - y = list(pts[:, 1]) - x = x + [x[0]] - y = y + [y[0]] - - # Pad the x and y series so it "wraps around". - # Note that if x and y are numpy arrays, you'll need to - # use np.r_ or np.concatenate instead of addition! - orig_len = len(x) - x = x[-3:-1] + x + x[1:3] - y = y[-3:-1] + y + y[1:3] - - t = np.arange(len(x)) - ti = np.linspace(2, orig_len + 1, 10 * orig_len) - - xi = interp1d(t, x, kind="cubic")(ti) - yi = interp1d(t, y, kind="cubic")(ti) - v = np.stack((xi, yi), axis=1) - v = v.astype(np.int) - - # fig, ax = plt.subplots() - # ax.plot(xi, yi) - # ax.plot(x, y) - # ax.margins(0.05) - # plt.show() - - return v[:-1] - - -def id_map(x, rng=None): - return x - - -def zero_wav_mels_when_silent_center( - mels, mel_ps, zero_mels, zero=-4, t_secs=0.25, verbose=False -): - if t_secs is None: - return {"mel": mels, "changed": False} - t_size = t_secs * mel_ps - _, t_axis = mels.shape - if t_size >= t_axis: - # 원하는 구간이 원래 보고 있는 구간보다 크다면 그대로 준다. 
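        # (English gloss, added: if the requested window is larger than the window we already
        #  have, hand it back unchanged.)
        # Added note: with the values apparently used elsewhere in this file (mel_ps=80,
        # mel_step_size=27, t_secs defaulting to 0.25), t_size is 20 mel frames, so this early
        # return is normally not taken and the 20 centre frames of the 27-frame window are
        # tested for silence below.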
- return {"mel": mels, "changed": False} - - t_size_half = int(t_size * 0.5) - if verbose: - print(f"t_axis:{t_axis}, t_size_half: {t_size_half}") - t_axis_s, t_axis_e = int(t_axis / 2) - t_size_half, int(t_axis / 2) + t_size_half - t_axis_s, t_axis_e = max(t_axis_s, 0), min(t_axis_e, t_axis) - if (mels[:, t_axis_s:t_axis_e] == -4).all(): - return {"mel": zero_mels, "changed": True} - - return {"mel": mels, "changed": False} - - -g_cached_fps = {} -g_cached_frames = {} -g_cached_mels = {} -g_cached_pickle = {} - - -class LipGanDS(Dataset): - def __init__(self, images, audios, args, phase, verbose=False): - self.args = args - self.phase = phase - self.mel_step_size = args.mel_step_size - self.mel_ps = args.mel_ps - self.images = images - self.audios = audios - self.mask_ver = ( - list(args.mask_ver) - if isinstance(args.mask_ver, (list, tuple)) - else [args.mask_ver] - ) - self.num_ips = args.num_ips - self.mel_trsf_ver = args.mel_trsf_ver - self.mel_norm_ver = args.mel_norm_ver - self.mels = {} - self.preds = {} - self.verbose = verbose - # snow:96, -4 은 mel 만드는데서 하드코딩되어있는 값이다. - self.zero_mels = np.full((96, self.mel_step_size), -4, dtype=np.float32) - self.silent_secs = ( - None if "silent_secs" not in args.keys() else args["silent_secs"] - ) - self.keying_mask_ver = ( - args.keying_mask_ver if "keying_mask_ver" in self.args else None - ) - if self.verbose and self.silent_secs is not None: - print("!! silent_secs:", self.silent_secs) - - self.smoothing_mask = True if args.smoothing_mask else False - if self.smoothing_mask and self.verbose: - print("!! smoothing_mask is True !!") - - def calc_half_window_size(self, fps): - mel_step_secs = self.mel_step_size * 1.0 / self.mel_ps - a_frame_secs = 1.0 / fps - return int(mel_step_secs / a_frame_secs / 2.0) - - def __len__(self): - return len(self.images) - - def __getitem__(self, idx): - ret_0 = self.choose_image(idx) - if ret_0 is None: - return self[idx + 1] % len(self.images) - - ret_1 = self.choose_audio(idx) - if ret_1 is None: - return self[idx + 1] % len(self.images) - mel = zero_wav_mels_when_silent_center( - mels=ret_1["mel"], - mel_ps=self.mel_ps, - zero_mels=self.zero_mels, - t_secs=self.silent_secs, - verbose=self.verbose, - ) - - return { - "mel": mel["mel"], - "ips": ret_0["ips"], - "mask": ret_0["mask"], - "img_gt_with_alpha": ret_0["img_gt_with_alpha"], - "filename": str(ret_0["filename"]), - } - - def read_fps(self, dir_name): - if str(dir_name) not in g_cached_fps: - with open(dir_name / "fps.txt") as f: - fps = float(f.read()) - return fps - g_cached_fps[str(dir_name)] = fps - return g_cached_fps[str(dir_name)] - - def get_frames(self, dir_name): - if str(dir_name) not in g_cached_frames: - frames = glob(escape(str(dir_name)) + "/*.jpg") - if len(frames) == 0: - frames = glob(escape(str(dir_name)) + "/*.png") - g_cached_frames[str(dir_name)] = frames - return frames - return g_cached_frames[str(dir_name)] - - def load_mel(self, dir_name): - if str(dir_name) not in g_cached_mels: - mel_fname = dir_name / "mels.npz" - if 0 < self.mel_norm_ver: - mel_fname = dir_name / f"mels_v{self.mel_norm_ver}.npz" - - if self.mel_trsf_ver == 0: - if random.randint(0, 1) == 0: - mel_fnames = [dir_name / f"mels_{i:02d}.npz" for i in range(10)] - mel_fname_2 = random.choice(mel_fnames) - if Path(mel_fname_2).exists(): - mel_fname = mel_fname_2 - - with np.load(str(mel_fname)) as f: - mel = f["spec"] - # mel = np.load(str(mel_fname))['spec'] - return mel - g_cached_mels[str(dir_name)] = mel - return g_cached_mels[str(dir_name)] - - def 
load_processed_wav(self, dir_name, model_type): - if str(dir_name) not in g_cached_mels: - if model_type == "stf_v3": - mel_fname = dir_name / "processed_audio.npy" - mel = np.load(str(mel_fname)) - return mel - g_cached_mels[str(dir_name)] = mel - return g_cached_mels[str(dir_name)] - - def read_pickle(self, dir_name): - if str(dir_name) not in g_cached_pickle: - # print('pikcle_dir:', dir_name) - df = pd.read_pickle(dir_name / "df_fan.pickle") - preds = df.set_index("frame_idx")["cropped_pts2d"] - # g_cached_pickle[str(dir_name)] = preds - return preds - return g_cached_pickle[str(dir_name)] - - def choose_audio(self, idx): - images = self.audios - - args = self.args - img_name = Path(images[idx]) - gt_fname = img_name.name - dir_name = img_name.parent - - if args.model_type == "stf_v3": - mel = self.load_processed_wav(dir_name, args.model_type) - fps = self.read_fps(dir_name) - mel = get_processed_audio_segment(gt_fname, mel, fps, 16000) - else: - if dir_name not in self.mels.keys(): - self.mels[dir_name] = self.load_mel(dir_name) - mel = self.mels[dir_name] - fps = self.read_fps(dir_name) - mel = get_audio_segment( - gt_fname, - mel, - self.mel_step_size, - self.mel_ps, - fps, - self.calc_half_window_size(fps), - ) - - if mel is None or mel.shape[1] != self.mel_step_size: - return None - - if sum(np.isnan(mel.flatten())) > 0: - return None - - if mel is not None and (args.model_type in ["ae", "stf_v1", "aeltu", "stf_v2"]): - mel = mel.astype(np.float32) - - return { - "mel": mel, - } - - def choose_image(self, idx): - images = self.images - - args = self.args - img_name = Path(images[idx]) - gt_fname = img_name.name - dir_name = img_name.parent - - sidx = frame_id(gt_fname) - - img_gt = cv2.imread(str(img_name), cv2.IMREAD_UNCHANGED) - - h, w = img_gt.shape[:2] - assert (h, w) == (args.img_size, args.img_size) - - masked = img_gt[:, :, :3].copy() - img_ip = masked * 2.0 / 255.0 - 1.0 - - if dir_name not in self.preds.keys(): - self.preds[dir_name] = self.read_pickle(dir_name) - - preds = self.preds[dir_name] - if preds[sidx] is None: - # snow : 인사하는 템플릿이 들어오면서 preds 가 없는 경우가 생겼다. - # 이런 경우, 마스크 없이 원래 이미지를 그대로 준다. 
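            # (English gloss of the snow note above, added: greeting templates introduced
            #  frames for which there are no landmark predictions (preds); in that case the
            #  original image is handed back as-is, without a mask.)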
- mask = np.zeros_like(masked, dtype=np.uint8) - else: - mask_ver = random.choice(self.mask_ver) - pts = calc_poly[mask_ver](preds[sidx], masked.shape[0], randomness=False) - if self.keying_mask_ver is not None: - keying_pts = calc_poly[self.keying_mask_ver]( - preds[sidx], masked.shape[0], randomness=False - ) - else: - keying_pts = pts - - if self.smoothing_mask: - pts = smoothing_mask(pts) - masked = masking(masked, pts) - mask = np.zeros_like(masked, dtype=np.uint8) - mask = masking(mask, keying_pts) - - img_ips = [img_ip for _ in range(self.num_ips)] - ips = np.concatenate([masked * 2.0 / 255.0 - 1.0] + img_ips, axis=2) - - return { - "ips": ips.astype(np.float32), - "mask": mask, - "img_gt_with_alpha": img_gt, - "filename": img_name, - } diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__init__.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/__init__.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 1796bcca6ef53f9eff7ac7f14bdfe168782b3a6f..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/attentions.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/attentions.cpython-310.pyc deleted file mode 100644 index 9df2ce892912fa5ec1e81a64d5563eba81729570..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/attentions.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/autoencoder.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/autoencoder.cpython-310.pyc deleted file mode 100644 index ef5c9dcc36b6cf821e1b6b837110f8e26638ba8d..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/autoencoder.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/mask_history.cpython-310.pyc b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/mask_history.cpython-310.pyc deleted file mode 100644 index 66ee178d84a2acd908bc1ea1e591ba45d9311e95..0000000000000000000000000000000000000000 Binary files a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/__pycache__/mask_history.cpython-310.pyc and /dev/null differ diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/attentions.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/attentions.py deleted file mode 100644 index 0ea3b77dbbf631144b637090789d8257a8fa71e7..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/attentions.py +++ /dev/null @@ -1,537 +0,0 @@ -import math -from typing import Optional, Tuple - -import numpy as np -import torch -import torch.nn as nn -import torch.nn.functional as F -from torch import Tensor - - -class ScaledDotProductAttention(nn.Module): - """ - Scaled Dot-Product Attention proposed in "Attention Is All You Need" - Compute the dot products of the query with all keys, divide 
each by sqrt(dim), - and apply a softmax function to obtain the weights on the values - Args: dim, mask - dim (int): dimention of attention - mask (torch.Tensor): tensor containing indices to be masked - Inputs: query, key, value, mask - - **query** (batch, q_len, d_model): tensor containing projection vector for decoder. - - **key** (batch, k_len, d_model): tensor containing projection vector for encoder. - - **value** (batch, v_len, d_model): tensor containing features of the encoded input sequence. - - **mask** (-): tensor containing indices to be masked - Returns: context, attn - - **context**: tensor containing the context vector from attention mechanism. - - **attn**: tensor containing the attention (alignment) from the encoder outputs. - """ - - def __init__(self, dim: int): - super(ScaledDotProductAttention, self).__init__() - self.sqrt_dim = np.sqrt(dim) - - def forward( - self, query: Tensor, key: Tensor, value: Tensor, mask: Optional[Tensor] = None - ) -> Tuple[Tensor, Tensor]: - score = torch.bmm(query, key.transpose(1, 2)) / self.sqrt_dim - - if mask is not None: - score.masked_fill_(mask.view(score.size()), -float("Inf")) - - attn = F.softmax(score, -1) - context = torch.bmm(attn, value) - return context, attn - - -class DotProductAttention(nn.Module): - """ - Compute the dot products of the query with all values and apply a softmax function to obtain the weights on the values - """ - - def __init__(self, hidden_dim): - super(DotProductAttention, self).__init__() - - def forward(self, query: Tensor, value: Tensor) -> Tuple[Tensor, Tensor]: - batch_size, hidden_dim, input_size = query.size(0), query.size(2), value.size(1) - - score = torch.bmm(query, value.transpose(1, 2)) - attn = F.softmax(score.view(-1, input_size), dim=1).view( - batch_size, -1, input_size - ) - context = torch.bmm(attn, value) - - return context, attn - - -class AdditiveAttention(nn.Module): - """ - Applies a additive attention (bahdanau) mechanism on the output features from the decoder. - Additive attention proposed in "Neural Machine Translation by Jointly Learning to Align and Translate" paper. - Args: - hidden_dim (int): dimesion of hidden state vector - Inputs: query, value - - **query** (batch_size, q_len, hidden_dim): tensor containing the output features from the decoder. - - **value** (batch_size, v_len, hidden_dim): tensor containing features of the encoded input sequence. - Returns: context, attn - - **context**: tensor containing the context vector from attention mechanism. - - **attn**: tensor containing the alignment from the encoder outputs. - Reference: - - **Neural Machine Translation by Jointly Learning to Align and Translate**: https://arxiv.org/abs/1409.0473 - """ - - def __init__(self, hidden_dim: int) -> None: - super(AdditiveAttention, self).__init__() - self.query_proj = nn.Linear(hidden_dim, hidden_dim, bias=False) - self.key_proj = nn.Linear(hidden_dim, hidden_dim, bias=False) - self.bias = nn.Parameter(torch.rand(hidden_dim).uniform_(-0.1, 0.1)) - self.score_proj = nn.Linear(hidden_dim, 1) - - def forward( - self, query: Tensor, key: Tensor, value: Tensor - ) -> Tuple[Tensor, Tensor]: - score = self.score_proj( - torch.tanh(self.key_proj(key) + self.query_proj(query) + self.bias) - ).squeeze(-1) - attn = F.softmax(score, dim=-1) - context = torch.bmm(attn.unsqueeze(1), value) - return context, attn - - -class LocationAwareAttention(nn.Module): - """ - Applies a location-aware attention mechanism on the output features from the decoder. 
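    (Added note: as implemented in forward() below, the score is
        score = w_score · tanh(W_q q + W_v v + Conv1d(prev_attn) + b)
     and the alignment is obtained with a smoothed sigmoid normalisation by default,
     or a plain softmax when smoothing=False.)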
- Location-aware attention proposed in "Attention-Based Models for Speech Recognition" paper. - The location-aware attention mechanism is performing well in speech recognition tasks. - We refer to implementation of ClovaCall Attention style. - Args: - hidden_dim (int): dimesion of hidden state vector - smoothing (bool): flag indication whether to use smoothing or not. - Inputs: query, value, last_attn, smoothing - - **query** (batch, q_len, hidden_dim): tensor containing the output features from the decoder. - - **value** (batch, v_len, hidden_dim): tensor containing features of the encoded input sequence. - - **last_attn** (batch_size * num_heads, v_len): tensor containing previous timestep`s attention (alignment) - Returns: output, attn - - **output** (batch, output_len, dimensions): tensor containing the feature from encoder outputs - - **attn** (batch * num_heads, v_len): tensor containing the attention (alignment) from the encoder outputs. - Reference: - - **Attention-Based Models for Speech Recognition**: https://arxiv.org/abs/1506.07503 - - **ClovaCall**: https://github.com/clovaai/ClovaCall/blob/master/las.pytorch/models/attention.py - """ - - def __init__(self, hidden_dim: int, smoothing: bool = True) -> None: - super(LocationAwareAttention, self).__init__() - self.hidden_dim = hidden_dim - self.conv1d = nn.Conv1d( - in_channels=1, out_channels=hidden_dim, kernel_size=3, padding=1 - ) - self.query_proj = nn.Linear(hidden_dim, hidden_dim, bias=False) - self.value_proj = nn.Linear(hidden_dim, hidden_dim, bias=False) - self.score_proj = nn.Linear(hidden_dim, 1, bias=True) - self.bias = nn.Parameter(torch.rand(hidden_dim).uniform_(-0.1, 0.1)) - self.smoothing = smoothing - self.prev_attn = None - - def forward(self, query: Tensor, value: Tensor) -> Tuple[Tensor, Tensor]: - batch_size, hidden_dim, seq_len = query.size(0), query.size(2), value.size(1) - - # Initialize previous attention (alignment) to zeros - if (self.prev_attn is None) or (self.prev_attn.size(0) != batch_size): - self.prev_attn = value.new_zeros(batch_size, seq_len) - - conv_attn = torch.transpose(self.conv1d(self.prev_attn.unsqueeze(1)), 1, 2) - score = self.score_proj( - torch.tanh( - self.query_proj(query.reshape(-1, hidden_dim)).view( - batch_size, -1, hidden_dim - ) - + self.value_proj(value.reshape(-1, hidden_dim)).view( - batch_size, -1, hidden_dim - ) - + conv_attn - + self.bias - ) - ).squeeze(dim=-1) - - if self.smoothing: - score = torch.sigmoid(score) - attn = torch.div(score, score.sum(dim=-1).unsqueeze(dim=-1)) - else: - attn = F.softmax(score, dim=-1) - - context = torch.bmm(attn.unsqueeze(dim=1), value).squeeze( - dim=1 - ) # Bx1xT X BxTxD => Bx1xD => BxD - self.prev_attn = attn.detach() - - return context, attn - - -class MultiHeadLocationAwareAttention(nn.Module): - """ - Applies a multi-headed location-aware attention mechanism on the output features from the decoder. - Location-aware attention proposed in "Attention-Based Models for Speech Recognition" paper. - The location-aware attention mechanism is performing well in speech recognition tasks. - In the above paper applied a signle head, but we applied multi head concept. - Args: - hidden_dim (int): The number of expected features in the output - num_heads (int): The number of heads. (default: ) - conv_out_channel (int): The number of out channel in convolution - Inputs: query, value, prev_attn - - **query** (batch, q_len, hidden_dim): tensor containing the output features from the decoder. 
- - **value** (batch, v_len, hidden_dim): tensor containing features of the encoded input sequence. - - **prev_attn** (batch_size * num_heads, v_len): tensor containing previous timestep`s attention (alignment) - Returns: output, attn - - **output** (batch, output_len, dimensions): tensor containing the feature from encoder outputs - - **attn** (batch * num_heads, v_len): tensor containing the attention (alignment) from the encoder outputs. - Reference: - - **Attention Is All You Need**: https://arxiv.org/abs/1706.03762 - - **Attention-Based Models for Speech Recognition**: https://arxiv.org/abs/1506.07503 - """ - - def __init__( - self, hidden_dim: int, num_heads: int = 8, conv_out_channel: int = 10 - ) -> None: - super(MultiHeadLocationAwareAttention, self).__init__() - self.hidden_dim = hidden_dim - self.num_heads = num_heads - self.dim = int(hidden_dim / num_heads) - self.conv1d = nn.Conv1d(num_heads, conv_out_channel, kernel_size=3, padding=1) - self.loc_proj = nn.Linear(conv_out_channel, self.dim, bias=False) - self.query_proj = nn.Linear(hidden_dim, self.dim * num_heads, bias=False) - self.value_proj = nn.Linear(hidden_dim, self.dim * num_heads, bias=False) - self.score_proj = nn.Linear(self.dim, 1, bias=True) - self.bias = nn.Parameter(torch.rand(self.dim).uniform_(-0.1, 0.1)) - self.prev_attn = None - - def forward(self, query: Tensor, value: Tensor) -> Tuple[Tensor, Tensor]: - batch_size, seq_len = value.size(0), value.size(1) - - if self.prev_attn is None: - self.prev_attn = value.new_zeros(batch_size, self.num_heads, seq_len) - - loc_energy = torch.tanh( - self.loc_proj(self.conv1d(self.prev_attn).transpose(1, 2)) - ) - loc_energy = ( - loc_energy.unsqueeze(1) - .repeat(1, self.num_heads, 1, 1) - .view(-1, seq_len, self.dim) - ) - - query = ( - self.query_proj(query) - .view(batch_size, -1, self.num_heads, self.dim) - .permute(0, 2, 1, 3) - ) - value = ( - self.value_proj(value) - .view(batch_size, -1, self.num_heads, self.dim) - .permute(0, 2, 1, 3) - ) - query = query.contiguous().view(-1, 1, self.dim) - value = value.contiguous().view(-1, seq_len, self.dim) - - score = self.score_proj( - torch.tanh(value + query + loc_energy + self.bias) - ).squeeze(2) - attn = F.softmax(score, dim=1) - - value = value.view(batch_size, seq_len, self.num_heads, self.dim).permute( - 0, 2, 1, 3 - ) - value = value.contiguous().view(-1, seq_len, self.dim) - - context = torch.bmm(attn.unsqueeze(1), value).view( - batch_size, -1, self.num_heads * self.dim - ) - attn = attn.view(batch_size, self.num_heads, -1) - self.prev_attn = attn - - return context, attn - - -class MultiHeadAttention(nn.Module): - """ - Multi-Head Attention proposed in "Attention Is All You Need" - Instead of performing a single attention function with d_model-dimensional keys, values, and queries, - project the queries, keys and values h times with different, learned linear projections to d_head dimensions. - These are concatenated and once again projected, resulting in the final values. - Multi-head attention allows the model to jointly attend to information from different representation - subspaces at different positions. - MultiHead(Q, K, V) = Concat(head_1, ..., head_h) · W_o - where head_i = Attention(Q · W_q, K · W_k, V · W_v) - Args: - d_model (int): The dimension of keys / values / quries (default: 512) - num_heads (int): The number of attention heads. 
(default: 8) - Inputs: query, key, value, mask - - **query** (batch, q_len, d_model): In transformer, three different ways: - Case 1: come from previoys decoder layer - Case 2: come from the input embedding - Case 3: come from the output embedding (masked) - - **key** (batch, k_len, d_model): In transformer, three different ways: - Case 1: come from the output of the encoder - Case 2: come from the input embeddings - Case 3: come from the output embedding (masked) - - **value** (batch, v_len, d_model): In transformer, three different ways: - Case 1: come from the output of the encoder - Case 2: come from the input embeddings - Case 3: come from the output embedding (masked) - - **mask** (-): tensor containing indices to be masked - Returns: output, attn - - **output** (batch, output_len, dimensions): tensor containing the attended output features. - - **attn** (batch * num_heads, v_len): tensor containing the attention (alignment) from the encoder outputs. - """ - - def __init__(self, d_model: int = 512, num_heads: int = 8): - super(MultiHeadAttention, self).__init__() - - assert d_model % num_heads == 0, "d_model % num_heads should be zero." - - self.d_head = int(d_model / num_heads) - self.num_heads = num_heads - self.scaled_dot_attn = ScaledDotProductAttention(self.d_head) - self.query_proj = nn.Linear(d_model, self.d_head * num_heads) - self.key_proj = nn.Linear(d_model, self.d_head * num_heads) - self.value_proj = nn.Linear(d_model, self.d_head * num_heads) - - def forward( - self, query: Tensor, key: Tensor, value: Tensor, mask: Optional[Tensor] = None - ) -> Tuple[Tensor, Tensor]: - batch_size = value.size(0) - - query = self.query_proj(query).view( - batch_size, -1, self.num_heads, self.d_head - ) # BxQ_LENxNxD - key = self.key_proj(key).view( - batch_size, -1, self.num_heads, self.d_head - ) # BxK_LENxNxD - value = self.value_proj(value).view( - batch_size, -1, self.num_heads, self.d_head - ) # BxV_LENxNxD - - query = ( - query.permute(2, 0, 1, 3) - .contiguous() - .view(batch_size * self.num_heads, -1, self.d_head) - ) # BNxQ_LENxD - key = ( - key.permute(2, 0, 1, 3) - .contiguous() - .view(batch_size * self.num_heads, -1, self.d_head) - ) # BNxK_LENxD - value = ( - value.permute(2, 0, 1, 3) - .contiguous() - .view(batch_size * self.num_heads, -1, self.d_head) - ) # BNxV_LENxD - - if mask is not None: - mask = mask.unsqueeze(1).repeat(1, self.num_heads, 1, 1) # BxNxQ_LENxK_LEN - - context, attn = self.scaled_dot_attn(query, key, value, mask) - - context = context.view(self.num_heads, batch_size, -1, self.d_head) - context = ( - context.permute(1, 2, 0, 3) - .contiguous() - .view(batch_size, -1, self.num_heads * self.d_head) - ) # BxTxND - - return context, attn - - -class RelativeMultiHeadAttention(nn.Module): - """ - Multi-head attention with relative positional encoding. - This concept was proposed in the "Transformer-XL: Attentive Language Models Beyond a Fixed-Length Context" - Args: - d_model (int): The dimension of model - num_heads (int): The number of attention heads. 
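The multi-head classes in this file share one reshape discipline: project to num_heads * d_head, fold the head axis into the batch axis, attend per head, then unfold and concatenate. A self-contained check of those reshapes with arbitrary sizes, showing that the split followed by the merge recovers the original tensor:

import torch

B, L, d_model, n_heads = 2, 10, 512, 8
d_head = d_model // n_heads
x = torch.randn(B, L, d_model)

# Split into heads the way the module above does it.
heads = x.view(B, L, n_heads, d_head).permute(2, 0, 1, 3)        # (N, B, L, d_head)
heads = heads.contiguous().view(B * n_heads, L, d_head)          # (B*N, L, d_head)

# Merge back, as done after scaled dot-product attention has run per head.
merged = heads.view(n_heads, B, L, d_head).permute(1, 2, 0, 3)   # (B, L, N, d_head)
merged = merged.contiguous().view(B, L, d_model)
assert torch.equal(merged, x)  # split followed by merge is lossless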
- dropout_p (float): probability of dropout - Inputs: query, key, value, pos_embedding, mask - - **query** (batch, time, dim): Tensor containing query vector - - **key** (batch, time, dim): Tensor containing key vector - - **value** (batch, time, dim): Tensor containing value vector - - **pos_embedding** (batch, time, dim): Positional embedding tensor - - **mask** (batch, 1, time2) or (batch, time1, time2): Tensor containing indices to be masked - Returns: - - **outputs**: Tensor produces by relative multi head attention module. - """ - - def __init__( - self, - d_model: int = 512, - num_heads: int = 16, - dropout_p: float = 0.1, - ): - super(RelativeMultiHeadAttention, self).__init__() - assert d_model % num_heads == 0, "d_model % num_heads should be zero." - self.d_model = d_model - self.d_head = int(d_model / num_heads) - self.num_heads = num_heads - self.sqrt_dim = math.sqrt(d_model) - - self.query_proj = nn.Linear(d_model, d_model) - self.key_proj = nn.Linear(d_model, d_model) - self.value_proj = nn.Linear(d_model, d_model) - self.pos_proj = nn.Linear(d_model, d_model, bias=False) - - self.dropout = nn.Dropout(p=dropout_p) - self.u_bias = nn.Parameter(torch.Tensor(self.num_heads, self.d_head)) - self.v_bias = nn.Parameter(torch.Tensor(self.num_heads, self.d_head)) - torch.nn.init.xavier_uniform_(self.u_bias) - torch.nn.init.xavier_uniform_(self.v_bias) - - self.out_proj = nn.Linear(d_model, d_model) - - def forward( - self, - query: Tensor, - key: Tensor, - value: Tensor, - pos_embedding: Tensor, - mask: Optional[Tensor] = None, - ) -> Tensor: - batch_size = value.size(0) - - query = self.query_proj(query).view(batch_size, -1, self.num_heads, self.d_head) - key = ( - self.key_proj(key) - .view(batch_size, -1, self.num_heads, self.d_head) - .permute(0, 2, 1, 3) - ) - value = ( - self.value_proj(value) - .view(batch_size, -1, self.num_heads, self.d_head) - .permute(0, 2, 1, 3) - ) - pos_embedding = self.pos_proj(pos_embedding).view( - batch_size, -1, self.num_heads, self.d_head - ) - - content_score = torch.matmul( - (query + self.u_bias).transpose(1, 2), key.transpose(2, 3) - ) - pos_score = torch.matmul( - (query + self.v_bias).transpose(1, 2), pos_embedding.permute(0, 2, 3, 1) - ) - pos_score = self._compute_relative_positional_encoding(pos_score) - - score = (content_score + pos_score) / self.sqrt_dim - - if mask is not None: - mask = mask.unsqueeze(1) - score.masked_fill_(mask, -1e9) - - attn = F.softmax(score, -1) - attn = self.dropout(attn) - - context = torch.matmul(attn, value).transpose(1, 2) - context = context.contiguous().view(batch_size, -1, self.d_model) - - return self.out_proj(context) - - def _compute_relative_positional_encoding(self, pos_score: Tensor) -> Tensor: - batch_size, num_heads, seq_length1, seq_length2 = pos_score.size() - zeros = pos_score.new_zeros(batch_size, num_heads, seq_length1, 1) - padded_pos_score = torch.cat([zeros, pos_score], dim=-1) - - padded_pos_score = padded_pos_score.view( - batch_size, num_heads, seq_length2 + 1, seq_length1 - ) - pos_score = padded_pos_score[:, :, 1:].view_as(pos_score) - - return pos_score - - -class CustomizingAttention(nn.Module): - r""" - Customizing Attention - Applies a multi-head + location-aware attention mechanism on the output features from the decoder. - Multi-head attention proposed in "Attention Is All You Need" paper. - Location-aware attention proposed in "Attention-Based Models for Speech Recognition" paper. - I combined these two attention mechanisms as custom. 
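The relative-position path above scores queries against position embeddings and then realigns the result with the pad-and-reshape trick from Transformer-XL. The helper below mirrors _compute_relative_positional_encoding as a standalone function; tensor sizes are illustrative only:

import torch

def relative_shift(pos_score: torch.Tensor) -> torch.Tensor:
    # Pad a zero column, fold it into the time axis, drop the first row:
    # this shifts row i of the score matrix left by i positions.
    b, h, t1, t2 = pos_score.size()
    zeros = pos_score.new_zeros(b, h, t1, 1)
    padded = torch.cat([zeros, pos_score], dim=-1)   # (b, h, t1, t2 + 1)
    padded = padded.view(b, h, t2 + 1, t1)
    return padded[:, :, 1:].view_as(pos_score)

shifted = relative_shift(torch.randn(2, 16, 7, 7))   # (2, 16, 7, 7)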
- Args: - hidden_dim (int): The number of expected features in the output - num_heads (int): The number of heads. (default: ) - conv_out_channel (int): The dimension of convolution - Inputs: query, value, last_attn - - **query** (batch, q_len, hidden_dim): tensor containing the output features from the decoder. - - **value** (batch, v_len, hidden_dim): tensor containing features of the encoded input sequence. - - **last_attn** (batch_size * num_heads, v_len): tensor containing previous timestep`s alignment - Returns: output, attn - - **output** (batch, output_len, dimensions): tensor containing the attended output features from the decoder. - - **attn** (batch * num_heads, v_len): tensor containing the alignment from the encoder outputs. - Reference: - - **Attention Is All You Need**: https://arxiv.org/abs/1706.03762 - - **Attention-Based Models for Speech Recognition**: https://arxiv.org/abs/1506.07503 - """ - - def __init__( - self, hidden_dim: int, num_heads: int = 4, conv_out_channel: int = 10 - ) -> None: - super(CustomizingAttention, self).__init__() - self.hidden_dim = hidden_dim - self.num_heads = num_heads - self.dim = int(hidden_dim / num_heads) - self.scaled_dot_attn = ScaledDotProductAttention(self.dim) - self.conv1d = nn.Conv1d(1, conv_out_channel, kernel_size=3, padding=1) - self.query_proj = nn.Linear(hidden_dim, self.dim * num_heads, bias=True) - self.value_proj = nn.Linear(hidden_dim, self.dim * num_heads, bias=False) - self.loc_proj = nn.Linear(conv_out_channel, self.dim, bias=False) - self.bias = nn.Parameter(torch.rand(self.dim * num_heads).uniform_(-0.1, 0.1)) - - def forward( - self, query: Tensor, value: Tensor, last_attn: Tensor - ) -> Tuple[Tensor, Tensor]: - batch_size, q_len, v_len = value.size(0), query.size(1), value.size(1) - - if last_attn is None: - last_attn = value.new_zeros(batch_size * self.num_heads, v_len) - - loc_energy = self.get_loc_energy( - last_attn, batch_size, v_len - ) # get location energy - - query = self.query_proj(query).view( - batch_size, q_len, self.num_heads * self.dim - ) - value = ( - self.value_proj(value).view(batch_size, v_len, self.num_heads * self.dim) - + loc_energy - + self.bias - ) - - query = query.view(batch_size, q_len, self.num_heads, self.dim).permute( - 2, 0, 1, 3 - ) - value = value.view(batch_size, v_len, self.num_heads, self.dim).permute( - 2, 0, 1, 3 - ) - query = query.contiguous().view(-1, q_len, self.dim) - value = value.contiguous().view(-1, v_len, self.dim) - - context, attn = self.scaled_dot_attn(query, value) - attn = attn.squeeze() - - context = context.view(self.num_heads, batch_size, q_len, self.dim).permute( - 1, 2, 0, 3 - ) - context = context.contiguous().view(batch_size, q_len, -1) - - return context, attn - - def get_loc_energy(self, last_attn: Tensor, batch_size: int, v_len: int) -> Tensor: - conv_feat = self.conv1d(last_attn.unsqueeze(1)) - conv_feat = conv_feat.view(batch_size, self.num_heads, -1, v_len).permute( - 0, 1, 3, 2 - ) - - loc_energy = self.loc_proj(conv_feat).view( - batch_size, self.num_heads, v_len, self.dim - ) - loc_energy = loc_energy.permute(0, 2, 1, 3).reshape( - batch_size, v_len, self.num_heads * self.dim - ) - - return loc_energy diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/autoencoder.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/autoencoder.py deleted file mode 100644 index 99279c110d614ea1fc9c71f82551f39797044621..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/autoencoder.py +++ 
/dev/null @@ -1,433 +0,0 @@ -import torch -from torch import nn - -# from .conv import (Conv2d, Conv2dTranspose, ResidualConv2d) -from .attentions import * - -bias = True -inplace = False - - -class Conv1d(nn.Module): - def __init__(self, cin, cout, kernel_size, stride, padding, act=True): - super().__init__() - self.block = nn.Sequential( - nn.Conv1d( - cin, - cout, - kernel_size=kernel_size, - stride=stride, - padding=padding, - bias=bias, - ), - nn.BatchNorm1d(cout, momentum=0.8, eps=0.001), - ) - self.relu = nn.ReLU(inplace=inplace) if act else None - - def forward(self, x): - x1 = self.block(x) - if self.relu: - return self.relu(x1) - else: - return x1 - - -class Conv2d(nn.Module): - def __init__(self, cin, cout, kernel_size, stride, padding, act=True): - super().__init__() - self.block = nn.Sequential( - nn.Conv2d( - cin, - cout, - kernel_size=kernel_size, - stride=stride, - padding=padding, - bias=bias, - ), - nn.BatchNorm2d(cout, momentum=0.8, eps=0.001), - ) - self.relu = nn.ReLU(inplace=inplace) if act else None - - def forward(self, x): - x1 = self.block(x) - if self.relu: - return self.relu(x1) - else: - return x1 - - -class Conv2dTranspose(nn.Module): - def __init__( - self, cin, cout, kernel_size, stride, padding, output_padding=0, act=True - ): - super().__init__() - self.conv_block = nn.Sequential( - nn.ConvTranspose2d( - cin, cout, kernel_size, stride, padding, output_padding, bias=bias - ), - nn.BatchNorm2d(cout, momentum=0.8, eps=0.001), - ) - self.relu = nn.ReLU(inplace=inplace) if act else None - - def forward(self, x): - x1 = self.conv_block(x) - if self.relu: - return self.relu(x1) - else: - return x1 - - -class Residual_1d(nn.Module): - def __init__(self, c): - super().__init__() - self.conv1 = Conv1d(c, c, 3, 1, 1) - self.conv2 = Conv1d(c, c, 3, 1, 1) - self.relu = nn.ReLU(inplace=inplace) - - def forward(self, x): - x1 = self.conv1(x) - x2 = self.conv2(x1) - x3 = x2 + x - x3 = self.relu(x3) - return x3 - - -class Residual(nn.Module): - def __init__(self, c): - super().__init__() - self.conv1 = Conv2d(c, c, 3, 1, 1) - self.conv2 = Conv2d(c, c, 3, 1, 1) - self.relu = nn.ReLU(inplace=inplace) - - def forward(self, x): - x1 = self.conv1(x) - x2 = self.conv2(x1) - x3 = x2 + x - x3 = self.relu(x3) - return x3 - - -class Encoder(nn.Module): - def __init__(self, shape): - super().__init__() - - c, h, w = shape - - self.id_map = Conv2d(c, 32, 7, 1, 3) - - self.conv1 = Conv2d(32, 64, 5, 2, 2) - self.residual11 = Residual(64) - self.residual12 = Residual(64) - - self.conv2 = Conv2d(64, 128, 3, 2, 1) - self.residual21 = Residual(128) - self.residual22 = Residual(128) - self.residual23 = Residual(128) - - self.conv3 = Conv2d(128, 256, 3, 2, 1) - self.residual31 = Residual(256) - self.residual32 = Residual(256) - - self.conv4 = Conv2d(256, 512, 3, 2, 1) - self.residual41 = Residual(512) - self.residual42 = Residual(512) - - self.conv5 = Conv2d(512, 512, 3, 2, 1) - - self.conv6 = Conv2d(512, 512, 3, 1, 0) - - kh, kw = ((h + 31) // 32 - 2), ((w + 31) // 32 - 2) - # kh, kw = 2 * (kh // 2) + 1, 2 * (kw // 2) + 1 - self.conv7 = Conv2d(512, 512, (kh, kw), 1, 0) - - def forward(self, x): - id_map = self.id_map(x) # 32: 256, 108, 96 - - ft10 = self.conv1(id_map) # 64: 128, 54, 48 - ft11 = self.residual11(ft10) - ft12 = self.residual12(ft11) - - ft20 = self.conv2(ft12) # 128: 64, 27, 24 - ft21 = self.residual21(ft20) - ft22 = self.residual22(ft21) - ft23 = self.residual23(ft22) - - ft30 = self.conv3(ft23) # 256: 32, 14, 12 - ft31 = self.residual31(ft30) - ft32 = self.residual32(ft31) - 
- ft40 = self.conv4(ft32) # 512: 16, 7, 6 - ft41 = self.residual41(ft40) - ft42 = self.residual42(ft41) - - ft50 = self.conv5(ft42) # 512: 8, 4, 3 - ft60 = self.conv6(ft50) # 512: 6, 2, 1 - ft70 = self.conv7(ft60) # 512: 1, 1, 1 - - return [id_map, ft12, ft23, ft32, ft42, ft50, ft60, ft70] - - -class EncoderAudio(nn.Module): - def __init__(self, shape): - super().__init__() - - c, h, w = shape # 1, 96, 108 - - self.conv1 = Conv2d(c, 32, 3, 1, 1) - self.residual11 = Residual(32) - self.residual12 = Residual(32) - - self.conv2 = Conv2d(32, 64, 3, 3, 1) - self.residual21 = Residual(64) - self.residual22 = Residual(64) - - # sh, sw = (h + 26) // 27, (w + 26) // 27 - # self.conv3 = Conv2d(64, 128, (5, 5), (sh, sw), (sh//2, sw//2)) - # k = (w+26)//27 # w=108 => k=4 - self.conv3 = Conv2d(64, 128, 3, (3, 3), 1) - self.residual31 = Residual(128) - self.residual32 = Residual(128) - - self.conv4 = Conv2d(128, 256, 3, 3, 1) - self.residual41 = Residual(256) - self.residual42 = Residual(256) - - self.conv5 = Conv2d(256, 512, 4, 1, 0) - - self.conv6 = Conv2d(512, 512, 1, 1, 0) - - def forward(self, x): - ft10 = self.conv1(x) # 96x108 - ft11 = self.residual11(ft10) - ft12 = self.residual12(ft11) - - ft20 = self.conv2(ft12) # 32x36 - ft21 = self.residual21(ft20) - ft22 = self.residual22(ft21) - - ft30 = self.conv3(ft22) # 11x9 | 11x12 - ft31 = self.residual31(ft30) - ft32 = self.residual32(ft31) - - ft40 = self.conv4(ft32) # 4x3 | 4x4 - ft41 = self.residual41(ft40) - ft42 = self.residual42(ft41) - - ft50 = self.conv5(ft42) # 1x1 | 1, 1 - ft60 = self.conv6(ft50) # 1x1 - - return ft60 - - -class EncoderProcessedAudio(nn.Module): - def __init__(self, channel): - super().__init__() - - self.conv1 = Conv1d(channel, 512, 3, 1, 0) - self.residual11 = Residual_1d(512) - self.residual12 = Residual_1d(512) - - self.conv2 = Conv1d(512, 256, 3, 1, 0) - self.residual21 = Residual_1d(256) - self.residual22 = Residual_1d(256) - - self.conv3 = Conv1d(256, 128, 3, 1, 0) - self.residual31 = Residual_1d(128) - self.residual32 = Residual_1d(128) - - self.conv4 = Conv1d(128, 64, 3, 1, 0) - self.residual41 = Residual_1d(64) - self.residual42 = Residual_1d(64) - - self.conv5 = Conv1d(64, 32, 3, 2, 0) - self.residual51 = Residual_1d(32) - self.residual52 = Residual_1d(32) - - self.conv6 = Conv1d(32, 16, 3, 1, 0) - self.residual61 = Residual_1d(16) - self.residual62 = Residual_1d(16) - - def forward(self, x): - x = self.conv1(x.permute(0, 2, 1)) - x = self.residual11(x) - x = self.residual12(x) - - x = self.conv2(x) - x = self.residual21(x) - x = self.residual22(x) - - x = self.conv3(x) - x = self.residual31(x) - x = self.residual32(x) - - x = self.conv4(x) - x = self.residual41(x) - x = self.residual42(x) - - x = self.conv5(x) - x = self.residual51(x) - x = self.residual52(x) - - x = self.conv6(x) - x = self.residual61(x) - x = self.residual62(x) - - x = torch.flatten(x, start_dim=1).unsqueeze(dim=-1).unsqueeze(dim=-1) - - return x - - -class Decoder(nn.Module): - def __init__(self, shape): - super().__init__() - - c, h, w = shape - kh, kw = (h + 31) // 32, (w + 31) // 32 - self.convt1 = Conv2dTranspose(1024, 512, (kh, kw), (kh, kw), 0) - - self.convt2 = Conv2dTranspose(1024, 512, 3, 2, 1, 1) - self.residual21 = Residual(512) - self.residual22 = Residual(512) - - self.convt3 = Conv2dTranspose(1024, 256, 3, 2, 1, 1) - self.residual31 = Residual(256) - self.residual32 = Residual(256) - - self.convt4 = Conv2dTranspose(512, 128, 3, 2, 1, 1) - self.residual41 = Residual(128) - self.residual42 = Residual(128) - - self.convt5 = 
Conv2dTranspose(256, 64, 3, 2, 1, 1) - self.residual51 = Residual(64) - self.residual52 = Residual(64) - - self.convt6 = Conv2dTranspose(128, 32, 3, 2, 1, 1) - - self.conv7 = Conv2d(64, 16, 3, 1, 1) - self.conv8 = Conv2d(16, 16, 3, 1, 1) - self.conv9 = nn.Conv2d(16, 3, kernel_size=1, stride=1, padding=0) - self.sigmoid = nn.Sigmoid() - self.tanh = nn.Tanh() - - def forward(self, img_ft, audio_ft): - x = torch.cat([img_ft[-1], audio_ft], dim=1) # (B, 1024, 1, 1) - # 256, 96, - - x = self.convt1(x) # (B, 512: 8, 3) - - x = torch.cat([img_ft[5], x], dim=1) - - x = self.convt2(x) # (B, 512: 16, 6) - x = self.residual21(x) - x = self.residual22(x) - x = torch.cat([img_ft[4], x], dim=1) - - x = self.convt3(x) # (B, 256: 32, 12) - x = self.residual31(x) - x = self.residual32(x) - x = torch.cat([img_ft[3], x], dim=1) - - x = self.convt4(x) # (B, 128: 64, 24) - x = self.residual41(x) - x = self.residual42(x) - x = torch.cat([img_ft[2], x], dim=1) - - x = self.convt5(x) # (B, 64: 128, 48) - x = self.residual51(x) - x = self.residual52(x) - x = torch.cat([img_ft[1], x], dim=1) - - x = self.convt6(x) # (B, 32: 256, 96) - x = torch.cat([img_ft[0], x], dim=1) - x = self.conv7(x) # (B, 16: 256, 96) - x = self.conv8(x) # (B, 16: 256, 96) - - x = self.conv9(x) # (B, 3: 256, 96) - x = self.tanh(x) - - return x - - -class Speech2Face(nn.Module): - def __init__( - self, img_num, img_shape, audio_shape, model_type, infer=False, alpha=1.0 - ): - super().__init__() - - self.model_type = model_type - - if self.model_type == "stf_v3": - self.speech_encoder = EncoderProcessedAudio(1024) - else: - self.speech_encoder = EncoderAudio(audio_shape) - - c, h, w = img_shape - c = c * img_num - self.face_encoder = Encoder((c, h, w)) - self.face_decoder = Decoder(img_shape) - self.infer = infer - self.alpha = alpha - self.tanh = nn.Tanh() - self.relu = nn.ReLU() - - if self.model_type in ["aeltu", "stf_v2"]: - self.attention = LocationAwareAttention(512) - """ - elif self.model_type == "stf_v3": - self.audio_att = LocationAwareAttention(512) - self.image_att = LocationAwareAttention(512) - """ - - for m in self.modules(): - if isinstance(m, (nn.Conv1d, nn.Conv2d, nn.ConvTranspose2d)): - if bias == False: - nn.init.kaiming_normal_( - m.weight, mode="fan_out", nonlinearity="relu" - ) - else: - nn.init.xavier_uniform_(m.weight) - if m.bias is not None: - nn.init.constant_(m.bias, 0) - - elif isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.GroupNorm)): - nn.init.constant_(m.weight, 1) - nn.init.constant_(m.bias, 0) - - def forward(self, img, audio): - img_e = self.face_encoder(img) - audio_e = self.speech_encoder(audio) - - if self.model_type in ["ae", "stf_v1", "stf_v3"]: - img = self.face_decoder(img_e, audio_e) - elif self.model_type in ["aeltu", "stf_v2"]: - if audio_e.size(0) == 1: - att, _ = self.attention( - audio_e.squeeze().unsqueeze(dim=0).unsqueeze(dim=0), - audio_e.squeeze().unsqueeze(dim=0).unsqueeze(dim=0), - ) - att = att.unsqueeze(dim=-1).unsqueeze(dim=-1) - else: - att, _ = self.attention( - audio_e.squeeze().unsqueeze(dim=1), - audio_e.squeeze().unsqueeze(dim=1), - ) - att = att.squeeze().unsqueeze(dim=-1).unsqueeze(dim=-1) - img = self.face_decoder(img_e, self.tanh(att)) - """ - elif self.model_type == "stf_v3": - if audio_e.size(0) == 1: - a_att,_ = self.audio_att(audio_e.squeeze().unsqueeze(dim=0).unsqueeze(dim=0), audio_e.squeeze().unsqueeze(dim=0).unsqueeze(dim=0)) - a_att = a_att.unsqueeze(dim=-1).unsqueeze(dim=-1) - - i_att,_ = self.image_att(img_e[-1].squeeze().unsqueeze(dim=0).unsqueeze(dim=0), 
img_e[-1].squeeze().unsqueeze(dim=0).unsqueeze(dim=0)) - i_att = i_att.unsqueeze(dim=-1).unsqueeze(dim=-1) - else: - a_att,_ = self.audio_att(audio_e.squeeze().unsqueeze(dim=1), audio_e.squeeze().unsqueeze(dim=1)) - a_att = a_att.squeeze().unsqueeze(dim=-1).unsqueeze(dim=-1) - - i_att,_ = self.image_att(img_e[-1].squeeze().unsqueeze(dim=1), img_e[-1].squeeze().unsqueeze(dim=1)) - i_att = i_att.squeeze().unsqueeze(dim=-1).unsqueeze(dim=-1) - img_e[-1] = self.tanh(i_att) - img = self.face_decoder(img_e, self.tanh(a_att)) - """ - return img diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/datagen_aug.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/datagen_aug.py deleted file mode 100644 index 0d93ab42adf0623cc978cbfa33e5e3fb9225ae91..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/datagen_aug.py +++ /dev/null @@ -1,402 +0,0 @@ -import itertools -import os -import pdb -import random -import time -from glob import escape, glob -from pathlib import Path - -import cv2 -import numpy as np -import pandas as pd -import torch -from scipy.interpolate import interp1d -from torch.utils.data import DataLoader, Dataset - -from .mask_history import calc_poly -from .transform_history import mask_img_trsfs - -# snow : LipGanDS.__init__ 에서 계하도록 변경됨 -# half_window_size = 4 -# parameter로 받도록 수정 -# mel_step_size = 27 - - -def frame_id(fname): - return int(os.path.basename(fname).split("_")[0]) - - -def choose_ip_frame(frames, gt_frame, num_ips): - d = os.path.dirname(frames[0]) - return [os.path.join(d, gt_frame) for _ in range(num_ips)] - - -def get_audio_segment(center_frame, spec, mel_step_size, mel_ps, fps, half_window_size): - center_frame_id = frame_id(center_frame) - start_frame_id = center_frame_id - half_window_size - - # start_idx = int((80./25.) 
* start_frame_id) # 25 is fps of LRS2 - start_idx = int( - (float(mel_ps) / float(fps)) * start_frame_id - ) # mel, frame per sec 에 따라 계산 - if start_idx < 0: - spec = np.pad(spec, ((0, 0), (-start_idx, 0)), mode="edge") - start_idx = 0 - - end_idx = start_idx + mel_step_size - if spec.shape[1] < end_idx: - spec = np.pad(spec, ((0, 0), (0, end_idx - spec.shape[1])), mode="edge") - - # print('center_frame_id:', center_frame_id, ', mel [s,e]', start_idx, end_idx, ', mel shape:', spec.shape) - - return spec[:, start_idx:end_idx] - - -def get_processed_audio_segment(center_frame, processed_wav, fps, sample_rate): - center_frame_id = frame_id(center_frame) - - time_center = center_frame_id / fps - - center_idx = int(time_center * sample_rate) - center_idx = center_idx // 320 - start_idx = center_idx - 39 - - new_logits = processed_wav.copy() - if start_idx < 0: - new_logits = np.pad( - new_logits, ((-start_idx, 0), (0, 0)), mode="constant", constant_values=0 - ) - start_idx = 0 - - end_idx = start_idx + 39 * 2 - if len(new_logits) < end_idx: - new_logits = np.pad( - new_logits, - ((0, end_idx - len(new_logits)), (0, 0)), - mode="constant", - constant_values=0, - ) - - return new_logits[start_idx:end_idx, :] - - -def inter_alg(target_size, img): - if isinstance(target_size, tuple): - w, h = target_size - else: - w, h = target_size, target_size - return inter_alg_(w, h, img) - - -def inter_alg_(w, h, img): - if w * h < img.shape[0] * img.shape[1]: - return cv2.INTER_AREA - else: - return cv2.INTER_CUBIC - - -def resize_adapt(args, img): - sz = args.img_size - h, w = img.shape[:2] - if h == sz and w == sz: - return img - channel_size = img.shape[2] - board = np.full((sz, sz, channel_size), 128, np.uint8) - h, w = img.shape[:2] - if True: - # if sz < max(h, w): - r = sz / max(h, w) - h, w = int(round(r * h)), int(round(r * w)) - img = cv2.resize(img, (w, h), inter_alg(sz, img)) - board[(sz - h) // 2 : (sz - h) // 2 + h, (sz - w) // 2 : (sz - w) // 2 + w] = img - return board - - -def resize_adapt_pts(args, img, pts): - sz = args.img_size - h, w = img.shape[:2] - r = sz / max(h, w) - pts = pts * r - pts = np.round(np.array(pts)).astype(np.int32) - return pts - - -def masking(im, pts): - im = cv2.fillPoly(im, [pts], (128, 128, 128)) - return im - - -def smoothing_mask(pts): - pts = np.array(pts) - x = list(pts[:, 0]) - y = list(pts[:, 1]) - x = x + [x[0]] - y = y + [y[0]] - - # Pad the x and y series so it "wraps around". - # Note that if x and y are numpy arrays, you'll need to - # use np.r_ or np.concatenate instead of addition! - orig_len = len(x) - x = x[-3:-1] + x + x[1:3] - y = y[-3:-1] + y + y[1:3] - - t = np.arange(len(x)) - ti = np.linspace(2, orig_len + 1, 10 * orig_len) - - xi = interp1d(t, x, kind="cubic")(ti) - yi = interp1d(t, y, kind="cubic")(ti) - v = np.stack((xi, yi), axis=1) - v = v.astype(np.int) - - # fig, ax = plt.subplots() - # ax.plot(xi, yi) - # ax.plot(x, y) - # ax.margins(0.05) - # plt.show() - - return v[:-1] - - -def id_map(x, rng=None): - return x - - -def zero_wav_mels_when_silent_center( - mels, mel_ps, zero_mels, zero=-4, t_secs=0.25, verbose=False -): - if t_secs is None: - return {"mel": mels, "changed": False} - t_size = t_secs * mel_ps - _, t_axis = mels.shape - if t_size >= t_axis: - # 원하는 구간이 원래 보고 있는 구간보다 크다면 그대로 준다. 
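get_audio_segment above converts a video frame index into a fixed-width mel window using the ratio of mel frames per second to video fps, padding with edge values when the window runs off either end of the spectrogram. A condensed standalone version with the defaults implied by the comments in this file (80 mel frames per second, 25 fps, a 27-step window, 96 mel bins); names and toy data are illustrative:

import numpy as np

def mel_window(spec, center_frame_id, mel_step_size=27, mel_ps=80.0, fps=25.0, half_window=4):
    # Map the center video frame to a mel start index, then cut a fixed window.
    start = int((mel_ps / fps) * (center_frame_id - half_window))
    if start < 0:
        spec = np.pad(spec, ((0, 0), (-start, 0)), mode="edge")
        start = 0
    end = start + mel_step_size
    if spec.shape[1] < end:
        spec = np.pad(spec, ((0, 0), (0, end - spec.shape[1])), mode="edge")
    return spec[:, start:end]

window = mel_window(np.random.randn(96, 400), center_frame_id=10)  # shape (96, 27)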
- return {"mel": mels, "changed": False} - - t_size_half = int(t_size * 0.5) - if verbose: - print(f"t_axis:{t_axis}, t_size_half: {t_size_half}") - t_axis_s, t_axis_e = int(t_axis / 2) - t_size_half, int(t_axis / 2) + t_size_half - t_axis_s, t_axis_e = max(t_axis_s, 0), min(t_axis_e, t_axis) - if (mels[:, t_axis_s:t_axis_e] == -4).all(): - return {"mel": zero_mels, "changed": True} - - return {"mel": mels, "changed": False} - - -g_cached_fps = {} -g_cached_frames = {} -g_cached_mels = {} -g_cached_pickle = {} - - -class LipGanDS(Dataset): - def __init__(self, images, audios, args, phase, verbose=False): - self.args = args - self.phase = phase - self.mel_step_size = args.mel_step_size - self.mel_ps = args.mel_ps - self.images = images - self.audios = audios - self.mask_ver = ( - list(args.mask_ver) - if isinstance(args.mask_ver, (list, tuple)) - else [args.mask_ver] - ) - self.num_ips = args.num_ips - self.mel_trsf_ver = args.mel_trsf_ver - self.mel_norm_ver = args.mel_norm_ver - self.mels = {} - self.preds = {} - self.verbose = verbose - # snow:96, -4 은 mel 만드는데서 하드코딩되어있는 값이다. - self.zero_mels = np.full((96, self.mel_step_size), -4, dtype=np.float32) - self.silent_secs = ( - None if "silent_secs" not in args.keys() else args["silent_secs"] - ) - self.keying_mask_ver = ( - args.keying_mask_ver if "keying_mask_ver" in self.args else None - ) - if self.verbose and self.silent_secs is not None: - print("!! silent_secs:", self.silent_secs) - - self.smoothing_mask = True if args.smoothing_mask else False - if self.smoothing_mask and self.verbose: - print("!! smoothing_mask is True !!") - - def calc_half_window_size(self, fps): - mel_step_secs = self.mel_step_size * 1.0 / self.mel_ps - a_frame_secs = 1.0 / fps - return int(mel_step_secs / a_frame_secs / 2.0) - - def __len__(self): - return len(self.images) - - def __getitem__(self, idx): - ret_0 = self.choose_image(idx) - if ret_0 is None: - return self[idx + 1] % len(self.images) - - ret_1 = self.choose_audio(idx) - if ret_1 is None: - return self[idx + 1] % len(self.images) - mel = zero_wav_mels_when_silent_center( - mels=ret_1["mel"], - mel_ps=self.mel_ps, - zero_mels=self.zero_mels, - t_secs=self.silent_secs, - verbose=self.verbose, - ) - - return { - "mel": mel["mel"], - "ips": ret_0["ips"], - "mask": ret_0["mask"], - "img_gt_with_alpha": ret_0["img_gt_with_alpha"], - "filename": str(ret_0["filename"]), - } - - def read_fps(self, dir_name): - if str(dir_name) not in g_cached_fps: - with open(dir_name / "fps.txt") as f: - fps = float(f.read()) - return fps - g_cached_fps[str(dir_name)] = fps - return g_cached_fps[str(dir_name)] - - def get_frames(self, dir_name): - if str(dir_name) not in g_cached_frames: - frames = glob(escape(str(dir_name)) + "/*.jpg") - if len(frames) == 0: - frames = glob(escape(str(dir_name)) + "/*.png") - g_cached_frames[str(dir_name)] = frames - return frames - return g_cached_frames[str(dir_name)] - - def load_mel(self, dir_name): - if str(dir_name) not in g_cached_mels: - mel_fname = dir_name / "mels.npz" - if 0 < self.mel_norm_ver: - mel_fname = dir_name / f"mels_v{self.mel_norm_ver}.npz" - - if self.mel_trsf_ver == 0: - if random.randint(0, 1) == 0: - mel_fnames = [dir_name / f"mels_{i:02d}.npz" for i in range(10)] - mel_fname_2 = random.choice(mel_fnames) - if Path(mel_fname_2).exists(): - mel_fname = mel_fname_2 - - with np.load(str(mel_fname)) as f: - mel = f["spec"] - # mel = np.load(str(mel_fname))['spec'] - return mel - g_cached_mels[str(dir_name)] = mel - return g_cached_mels[str(dir_name)] - - def 
load_processed_wav(self, dir_name, model_type): - if str(dir_name) not in g_cached_mels: - if model_type == "stf_v3": - mel_fname = dir_name / "processed_audio.npy" - mel = np.load(str(mel_fname)) - return mel - g_cached_mels[str(dir_name)] = mel - return g_cached_mels[str(dir_name)] - - def read_pickle(self, dir_name): - if str(dir_name) not in g_cached_pickle: - # print('pikcle_dir:', dir_name) - df = pd.read_pickle(dir_name / "df_fan.pickle") - preds = df.set_index("frame_idx")["cropped_pts2d"] - # g_cached_pickle[str(dir_name)] = preds - return preds - return g_cached_pickle[str(dir_name)] - - def choose_audio(self, idx): - images = self.audios - - args = self.args - img_name = Path(images[idx]) - gt_fname = img_name.name - dir_name = img_name.parent - - if args.model_type == "stf_v3": - mel = self.load_processed_wav(dir_name, args.model_type) - fps = self.read_fps(dir_name) - mel = get_processed_audio_segment(gt_fname, mel, fps, 16000) - else: - if dir_name not in self.mels.keys(): - self.mels[dir_name] = self.load_mel(dir_name) - mel = self.mels[dir_name] - fps = self.read_fps(dir_name) - mel = get_audio_segment( - gt_fname, - mel, - self.mel_step_size, - self.mel_ps, - fps, - self.calc_half_window_size(fps), - ) - - if mel is None or mel.shape[1] != self.mel_step_size: - return None - - if sum(np.isnan(mel.flatten())) > 0: - return None - - if mel is not None and (args.model_type in ["ae", "stf_v1", "aeltu", "stf_v2"]): - mel = mel.astype(np.float32) - - return { - "mel": mel, - } - - def choose_image(self, idx): - images = self.images - - args = self.args - img_name = Path(images[idx]) - gt_fname = img_name.name - dir_name = img_name.parent - - sidx = frame_id(gt_fname) - - img_gt = cv2.imread(str(img_name), cv2.IMREAD_UNCHANGED) - - h, w = img_gt.shape[:2] - assert (h, w) == (args.img_size, args.img_size) - - masked = img_gt[:, :, :3].copy() - img_ip = masked * 2.0 / 255.0 - 1.0 - - if dir_name not in self.preds.keys(): - self.preds[dir_name] = self.read_pickle(dir_name) - - preds = self.preds[dir_name] - if preds[sidx] is None: - # snow : 인사하는 템플릿이 들어오면서 preds 가 없는 경우가 생겼다. - # 이런 경우, 마스크 없이 원래 이미지를 그대로 준다. 
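choose_image above assembles the network input by greying out the mouth region of the ground-truth frame with a landmark polygon and stacking it with unmasked copies of the same frame along the channel axis. A toy reconstruction of that assembly; the image, polygon, and num_ips value are made up here, only the normalization and stacking mirror the deleted code:

import cv2
import numpy as np

img = np.random.randint(0, 255, (256, 256, 3), dtype=np.uint8)
pts = np.array([[60, 140], [200, 140], [200, 250], [60, 250]], dtype=np.int32)  # toy polygon

reference = img.astype(np.float32) * 2.0 / 255.0 - 1.0       # unmasked identity frame in [-1, 1]
masked = cv2.fillPoly(img.copy(), [pts], (128, 128, 128))    # grey out the mouth region
masked_norm = masked.astype(np.float32) * 2.0 / 255.0 - 1.0

ips = np.concatenate([masked_norm, reference, reference], axis=2)  # num_ips = 2 -> 9 channels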
- mask = np.zeros_like(masked, dtype=np.uint8) - else: - mask_ver = random.choice(self.mask_ver) - pts = calc_poly[mask_ver](preds[sidx], masked.shape[0], randomness=False) - if self.keying_mask_ver is not None: - keying_pts = calc_poly[self.keying_mask_ver]( - preds[sidx], masked.shape[0], randomness=False - ) - else: - keying_pts = pts - - if self.smoothing_mask: - pts = smoothing_mask(pts) - masked = masking(masked, pts) - mask = np.zeros_like(masked, dtype=np.uint8) - mask = masking(mask, keying_pts) - - img_ips = [img_ip for _ in range(self.num_ips)] - ips = np.concatenate([masked * 2.0 / 255.0 - 1.0] + img_ips, axis=2) - - return { - "ips": ips.astype(np.float32), - "mask": mask, - "img_gt_with_alpha": img_gt, - "filename": img_name, - } diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/mask_history.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/mask_history.py deleted file mode 100644 index 6c6b51c3d9bc1831069c4f273247168dbdc6a7de..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/mask_history.py +++ /dev/null @@ -1,2379 +0,0 @@ -import random - -import numpy as np - - -def calc_poly_v6(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = pts[30] - y10 = h * 0.97 - y6 = y10 - y8 = y10 - - p13 = pts[13] - rand(0.3, 0.4) * (pts[13] - p0) - p12 = pts[12] - rand(0.1, 0.15) * (pts[12] - p0) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - p0) - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - p0) - p05[0] = pts[5][0] - p04 = pts[4] - rand(0.1, 0.15) * (pts[4] - p0) - p03 = pts[3] - rand(0.3, 0.4) * (pts[3] - p0) - - pts = [p0, p0, p13, p12, p11, p10, p08, p06, p05, p04, p03, p03] - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_v7(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = pts[29] - y10 = h * 0.97 - y6 = y10 - y8 = y10 - - p14 = pts[14] - rand(0.05, 0.10) * (pts[14] - p0) - p12 = pts[12] - rand(0.01, 0.05) * (pts[12] - p0) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - p0) - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - p0) - p05[0] = pts[5][0] - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - p0) - p02 = pts[2] - rand(0.05, 0.10) * (pts[2] - p0) - - pts = [p0, p0, p14, p12, p11, p10, p08, p06, p05, p04, p02, p02] - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_v8(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = pts[27] - y10 = h * 0.97 - y6 = y10 - y8 = y10 - - p14 = pts[15] - rand(0.05, 0.10) * (pts[14] - p0) - p12 = pts[12] - rand(0.01, 0.05) * (pts[12] - p0) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - p0) - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - p0) - p05[0] = pts[5][0] - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - p0) - p02 = pts[1] - rand(0.05, 0.10) * (pts[2] - p0) - - 
pts = [p0, p0, p14, p12, p11, p10, p08, p06, p05, p04, p02, p02] - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_v9(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = pts[27] - y10 = h * 0.99 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.01, 0.10) * (pts[15] - p0) - p13 = pts[13] - rand(0.01, 0.10) * (pts[13] - pts[30]) - p12 = pts[12] - rand(0.01, 0.05) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - pts[30]) - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = pts[5][0] - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.10) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - - pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# 김경수 아나운서 데모용 -def calc_poly_v10(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = pts[27] - y10 = h * 0.99 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.01, 0.10) * (pts[15] - p0) - p13 = pts[13] - rand(0.01, 0.10) * (pts[13] - pts[30]) - p12 = pts[12] - rand(0.01, 0.05) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - pts[30]) - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = (pts[5][0] + pts[6][0]) / 2 - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.10) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - - pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# CNA 아나운서 데모용 -def calc_poly_v11(pts, h, randomness=False): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.99 - y6 = y10 - y8 = y10 - - # print('4') - # p15 = pts[15] - rand(0.01, 0.10)*(pts[15] - p0) - p15 = pts[15] - rand(0.08, 0.12) * (pts[15] - p0) - p13 = pts[13] - rand(0.01, 0.10) * (pts[13] - pts[30]) - p12 = pts[12] - rand(0.01, 0.05) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - pts[30]) - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = (pts[5][0] + pts[6][0]) / 2 - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.10) * (pts[3] - pts[30]) - # p01 = pts[1] - rand(0.01, 0.10)*(pts[1] - p0) - p01 = pts[1] - rand(0.08, 0.12) * (pts[1] - p0) - - pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# 이민영 강사 -def calc_poly_v21(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = pts[27] - y10 = h * 0.99 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.01, 0.10) * (pts[15] - p0) - p13 = pts[13] - rand(0.01, 0.10) * (pts[13] - pts[30]) - p12 = pts[12] 
+ rand(0.01, 0.05) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - pts[30]) - - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = (pts[5][0] + pts[6][0]) / 2 - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.05) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.05) * (pts[1] - p0) - - p05[0] = p03[0] - p04[0] = p03[0] - - p11[0] = p13[0] - p12[0] = p13[0] - - pts = [ - p0, # 미간 - p15, - p13, - p12, - p11, # 우리가 보기에 오른쪽. p15가 위, p11이 아래 - p10, - p08, - p06, # 맨 아래 - p05, - p04, - p03, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p05가 아래 - return np.round(np.array(pts)).astype(np.int32) - - -# 이민영 강사, 측면 머리카락 안 가리게 -def calc_poly_v22(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = pts[27] - y10 = h * 0.99 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.01, 0.10) * (pts[15] - p0) - p13 = pts[13] - rand(0.01, 0.10) * (pts[13] - pts[30]) - # p12 = pts[12] + rand(0.01, 0.05)*(pts[12] - pts[30]) - p12 = pts[12] + rand(0.10, 0.10) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.12, 0.12) * (pts[11] - pts[30]) - - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = (pts[5][0] + pts[6][0]) / 2 - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.05) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.05) * (pts[1] - p0) - - p05[0] = p03[0] - p04[0] = p03[0] - - # p11[0] = p13[0] - # p12[0] = p13[0] - - pts = [ - p0, # 미간 - p15, - p13, - p12, - p11, # 우리가 보기에 오른쪽. p15가 위, p11이 아래 - p10, - p08, - p06, # 맨 아래 - p05, - p04, - p03, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p05가 아래 - return np.round(np.array(pts)).astype(np.int32) - - -# 이민영 강사, 측면 위 22번 마스크에서 얼굴을 좀 더 가리게 p13번을 밖으로 더 빼냄. -def calc_poly_v23(pts, h, randomness=True): - def rand_(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e) - - p0 = pts[27] - y10 = h * 0.99 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.01, 0.10) * (pts[15] - p0) - p13 = pts[13] + rand(0.01, 0.10) * (pts[13] - pts[30]) - # p12 = pts[12] + rand(0.01, 0.05)*(pts[12] - pts[30]) - p12 = pts[12] + rand(0.10, 0.10) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.12, 0.12) * (pts[11] - pts[30]) - - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = (pts[5][0] + pts[6][0]) / 2 - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.05) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.05) * (pts[1] - p0) - - p05[0] = p03[0] - p04[0] = p03[0] - - # p11[0] = p13[0] - # p12[0] = p13[0] - - pts = [ - p0, # 미간 - p15, - p13, - p12, - p11, # 우리가 보기에 오른쪽. p15가 위, p11이 아래 - p10, - p08, - p06, # 맨 아래 - p05, - p04, - p03, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p05가 아래 - return np.round(np.array(pts)).astype(np.int32) - - -# 이민영 강사, 측면 위 23번 마스크에서 얼굴을 좀 더 가리게 p15번을 밖으로 더 빼냄. 
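Every calc_poly_* variant in this file is built from the same primitive: move a 68-point landmark toward or away from a nose anchor (pts[27] or pts[30]) by a factor drawn from a small range, optionally pin an x or y coordinate, then round the polygon to int32 for cv2.fillPoly. A minimal sketch of that primitive; the helper name and the toy landmarks are illustrative, not part of the original module:

import random
import numpy as np

def jitter(p, anchor, lo, hi, randomness=True):
    # With randomness=False the midpoint of [lo, hi] is used, giving the
    # deterministic mask the dataset code requests at inference time.
    r = lo + (hi - lo) * random.random() if randomness else (lo + hi) / 2
    return p + r * (p - anchor)

pts = np.random.rand(68, 2) * 256             # toy 68-point face landmarks
anchor = pts[30]                               # nose tip
p11 = jitter(pts[11], anchor, 0.18, 0.22)      # push a jaw point slightly outward
poly = np.round(np.array([pts[27], pts[15], p11, pts[5]])).astype(np.int32)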
-def calc_poly_v24(pts, h, randomness=True): - def rand_(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e) - - p0 = pts[27] - y10 = h * 0.99 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.01, 0.10) * (pts[15] - p0) - p13 = pts[13] + rand(0.01, 0.10) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.10, 0.10) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.12, 0.12) * (pts[11] - pts[30]) - - p11[0] = pts[11][0] - p10 = [pts[9][0], y10] - - p08 = [pts[8][0], y8] - - p06 = [pts[7][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = (pts[5][0] + pts[6][0]) / 2 - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.05) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.05) * (pts[1] - p0) - - p05[0] = p03[0] - p04[0] = p03[0] - - # p11[0] = p13[0] - # p12[0] = p13[0] - - pts = [ - p0, # 미간 - p15, - p13, - p12, - p11, # 우리가 보기에 오른쪽. p15가 위, p11이 아래 - p10, - p08, - p06, # 맨 아래 - p05, - p04, - p03, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p05가 아래 - return np.round(np.array(pts)).astype(np.int32) - - -# 박은보, 옆모습, 아주 많이 가림 -def calc_poly_pwb_side_v39(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.85 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.01, 0.05) * ( - pts[15] - p0 - ) # - rand(0.01, 0.10)*(pts[15] - p0) - p13 = pts[13] + rand(0.01, 0.05) * ( - pts[13] - pts[30] - ) # - rand(0.01, 0.10)*(pts[13] - pts[30]) - p12 = pts[12] + rand(0.05, 0.10) * ( - pts[12] - pts[30] - ) # - rand(0.01, 0.05)*(pts[12] - pts[30]) - p11 = pts[11] + rand(0.18, 0.22) * ( - pts[11] - pts[30] - ) # + rand(0.18, 0.22)*(pts[11] - pts[30]) - p11[0] = pts[11][0] - p10 = [pts[10][0], y10] - p08 = [pts[9][0], y8] - p06 = [pts[8][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = pts[5][0] - p04 = pts[4] + rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.01, 0.10) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - - # pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - pts = [p0, p15, p13, p12, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# 휴넷 이민영, 꽁지머리쪽 안가리기 위해 새로 만듦 -def calc_poly_hunet_side_1(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p36 = pts[36] + rand(0.12, 0.18) * (pts[36] - pts[30]) - p45 = pts[45] + rand(0.12, 0.18) * (pts[45] - pts[30]) - - p15 = pts[15] + rand(0.20, 0.28) * (pts[15] - p0) - p13 = pts[13] + rand(0.20, 0.28) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.20, 0.28) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.20, 0.28) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.18, 0.22) * (pts[11][0] - pts[30][0]) - p10 = [pts[10][0], y10] - p36[1] = y10 - p45[1] = y10 - - p08 = [pts[9][0], y8] - p06 = [pts[8][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = pts[5][0] - p04 = pts[4] + rand(0.01, 0.13) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.01, 0.10) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.03) * (pts[1] - p0) - - pts = [p0, p15, p13, p45, p36, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# 한투, 묶음 머리 안가리기 위해 새로 만듦 -def calc_poly_hantu_side_1(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = 
random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p36 = pts[36] + rand(0.12, 0.18) * (pts[36] - pts[30]) - p45 = pts[45] + rand(0.12, 0.18) * (pts[45] - pts[30]) - - p15 = pts[15] + rand(0.20, 0.28) * (pts[15] - p0) - p13 = pts[13] + rand(0.20, 0.28) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.20, 0.28) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.20, 0.28) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.18, 0.22) * (pts[11][0] - pts[30][0]) - p10 = [pts[10][0], y10] - p36[1] = y10 - p45[1] = y10 - - p08 = [pts[9][0], y8] - p06 = [pts[8][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = pts[5][0] - p04 = pts[4] + rand(0.01, 0.05) * (pts[4] - pts[30]) - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - - pts = [p0, p15, p13, p45, p36, p04, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# 백하나, 볼 옆에 머리카락 안 가리기 위해 새로 만듦 -def calc_poly_class101_side_1(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - # y6 = y10 - # y8 = y10 - - p36 = pts[36] + rand(0.12, 0.18) * (pts[36] - pts[30]) - # p42 = pts[42] + rand(0.12, 0.18)*(pts[42] - pts[30]) - - p15 = pts[15] + rand(0.20, 0.28) * (pts[15] - p0) - p13 = pts[13] + rand(0.20, 0.28) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.20, 0.28) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.20, 0.28) * (pts[11] - pts[30]) - p10 = pts[10] + rand(0.28, 0.28) * (pts[10] - pts[30]) - p08 = pts[8] + rand(0.2, 0.2) * (pts[8] - pts[30]) - # p11[0] = pts[11][0] + rand(0.18, 0.22)*(pts[11][0] - pts[30][0]) - - # p08[1] = y10 - # p36[1] = y10 - # p42[1] = y10 - - # p08 = [pts[9][0], y8] - # p06 = [pts[8][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = pts[5][0] - p03 = pts[3] + rand(0.01, 0.05) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - - # p10[1] = p08[1] - p05[1] = p08[1] - p36[1] = p08[1] - - pts = [p0, p15, p13, p12, p11, p10, p08, p36, p05, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# 박은보, 옆모습, 많이 가리지 않음, 예전 v9와 유사 -def calc_poly_pwb_side_v39_1(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.85 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.01, 0.10) * (pts[15] - p0) - p13 = pts[13] - rand(0.01, 0.10) * (pts[13] - pts[30]) - p12 = pts[12] - rand(0.01, 0.05) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - pts[30]) - p11[0] = pts[11][0] - p10 = [pts[10][0], y10] - p08 = [pts[9][0], y8] - p06 = [pts[8][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = pts[5][0] - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.10) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - - # pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - pts = [p0, p15, p13, p12, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_pwb_front_v39_0(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.87 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.01, 0.08) * 
(pts[15] - p0) - p13 = pts[13] - rand(0.01, 0.08) * (pts[13] - pts[30]) - p12 = pts[12] - rand(0.01, 0.05) * (pts[12] - pts[30]) - p12[0] = p13[0] + h * 0.01 - - p10 = [pts[9][0], y10] - p08 = [pts[8][0], y8] - p06 = [pts[7][0], y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p03 = pts[3] - rand(0.01, 0.10) * (pts[3] - pts[30]) - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p04[0] = p03[0] - h * 0.01 - - pts = [ - p0, # 미간 - p15, - p13, - p12, - # p11, # 우리가 보기에 오른쪽. p15가 위, p11이 아래 - p10, - p08, - p06, # 맨 아래 - # p05, - p04, - p03, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p05가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# 박은보, 옆모습, 아주 많이 가림 -def calc_poly_pwb_side_v39(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.85 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.01, 0.05) * ( - pts[15] - p0 - ) # - rand(0.01, 0.10)*(pts[15] - p0) - p13 = pts[13] + rand(0.01, 0.05) * ( - pts[13] - pts[30] - ) # - rand(0.01, 0.10)*(pts[13] - pts[30]) - p12 = pts[12] + rand(0.05, 0.10) * ( - pts[12] - pts[30] - ) # - rand(0.01, 0.05)*(pts[12] - pts[30]) - p11 = pts[11] + rand(0.18, 0.22) * ( - pts[11] - pts[30] - ) # + rand(0.18, 0.22)*(pts[11] - pts[30]) - p11[0] = pts[11][0] - p10 = [pts[10][0], y10] - p08 = [pts[9][0], y8] - p06 = [pts[8][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = pts[5][0] - p04 = pts[4] + rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.01, 0.10) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - - # pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - pts = [p0, p15, p13, p12, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# 박은보, 옆모습, 많이 가리지 않음, 예전 v9와 유사 -def calc_poly_pwb_side_v39_1(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.85 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.01, 0.10) * (pts[15] - p0) - p13 = pts[13] - rand(0.01, 0.10) * (pts[13] - pts[30]) - p12 = pts[12] - rand(0.01, 0.05) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.18, 0.22) * (pts[11] - pts[30]) - p11[0] = pts[11][0] - p10 = [pts[10][0], y10] - p08 = [pts[9][0], y8] - p06 = [pts[8][0], y6] - p05 = pts[5] + rand(0.18, 0.22) * (pts[5] - pts[30]) - p05[0] = pts[5][0] - p04 = pts[4] - rand(0.01, 0.05) * (pts[4] - pts[30]) - p03 = pts[3] - rand(0.01, 0.10) * (pts[3] - pts[30]) - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - - # pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - pts = [p0, p15, p13, p12, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# 박은보, 측면, 아주 많이 가림 -# calc_poly_pwb_side_v39 보다 더 많이 가림 -# y10 : 99 -> 0.80 로 함 -def calc_poly_pwb_side_v39_11(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.04, 0.08) * (pts[15] - p0) - p13 = pts[13] + rand(0.04, 0.08) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.04, 0.08) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.04, 0.08) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.18, 0.22) * (pts[11][0] - pts[30][0]) - - p10 = pts[10] + rand(0.12, 0.18) * (pts[10] - pts[30]) - p08 = pts[9] + rand(0.12, 0.18) * (pts[9] - pts[30]) - p06 = 
pts[8] + rand(0.12, 0.18) * (pts[8] - pts[30]) - p05 = pts[5] + rand(0.12, 0.18) * (pts[5] - pts[30]) - p05[0] = pts[5][0] + rand(0.18, 0.22) * (pts[5][0] - pts[30][0]) - - p04 = pts[4] + rand(0.12, 0.18) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.12, 0.18) * (pts[3] - pts[30]) - p01 = pts[1] + rand(0.12, 0.18) * (pts[1] - p0) - - # p11[0] = pts[11][0] - p10[1] = y10 - p08[1] = y10 - p06[1] = y10 - # p05[0] = pts[5][0] - - pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - # pts = [p0, p15, p13, p12, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_pwb_side_v39_12(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.20, 0.28) * (pts[15] - p0) - p13 = pts[13] + rand(0.20, 0.28) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.20, 0.28) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.20, 0.28) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.18, 0.22) * (pts[11][0] - pts[30][0]) - - p10 = pts[10] + rand(0.12, 0.18) * (pts[10] - pts[30]) - p08 = pts[9] + rand(0.12, 0.18) * (pts[9] - pts[30]) - p06 = pts[8] + rand(0.12, 0.18) * (pts[8] - pts[30]) - p05 = pts[5] + rand(0.12, 0.18) * (pts[5] - pts[30]) - p05[0] = pts[5][0] + rand(0.18, 0.22) * (pts[5][0] - pts[30][0]) - - p04 = pts[4] + rand(0.12, 0.18) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.12, 0.18) * (pts[3] - pts[30]) - p01 = pts[1] + rand(0.12, 0.18) * (pts[1] - p0) - - # p11[0] = pts[11][0] - p10[1] = y10 - p08[1] = y10 - p06[1] = y10 - # p05[0] = pts[5][0] - - pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - # pts = [p0, p15, p13, p12, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_pwb_side_v39_13(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.20, 0.28) * (pts[15] - p0) - p13 = pts[13] + rand(0.20, 0.28) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.20, 0.28) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.20, 0.28) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.18, 0.22) * (pts[11][0] - pts[30][0]) - - p36 = pts[36] + rand(0.12, 0.18) * (pts[36] - pts[30]) - p45 = pts[45] + rand(0.12, 0.18) * (pts[45] - pts[30]) - - p05 = pts[5] + rand(0.12, 0.18) * (pts[5] - pts[30]) - p05[0] = pts[5][0] + rand(0.18, 0.22) * (pts[5][0] - pts[30][0]) - - p04 = pts[4] + rand(0.12, 0.18) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.12, 0.18) * (pts[3] - pts[30]) - p01 = pts[1] + rand(0.12, 0.18) * (pts[1] - p0) - - p36[1] = y10 - p45[1] = y10 - - # pts = [p0, p15, p13, p45, p36, p05, p04, p03, p01] - pts = [p0, p15, p13, p45, p36, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# calc_poly_pwb_side_v39_13 에서 귀 옆을 파낸 것(귀밑에 머리카락 쪽에 초록색이 생겨서 파냄) -def calc_poly_pwb_side_v39_13_1(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.20, 0.28) * (pts[15] - p0) - p15_1 = pts[15] - p13 = pts[13] + rand(0.20, 0.28) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.20, 0.28) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.20, 0.28) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.18, 
0.22) * (pts[11][0] - pts[30][0]) - - p36 = pts[36] + rand(0.12, 0.18) * (pts[36] - pts[30]) - p45 = pts[45] + rand(0.12, 0.18) * (pts[45] - pts[30]) - - p05 = pts[5] + rand(0.12, 0.18) * (pts[5] - pts[30]) - p05[0] = pts[5][0] + rand(0.18, 0.22) * (pts[5][0] - pts[30][0]) - - p04 = pts[4] + rand(0.12, 0.18) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.12, 0.18) * (pts[3] - pts[30]) - p01 = pts[1] + rand(0.12, 0.18) * (pts[1] - p0) - - p36[1] = y10 - p45[1] = y10 - - # pts = [p0, p15, p13, p45, p36, p05, p04, p03, p01] - pts = [p0, p15, p13, p12, p15_1, p45, p36, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# snow: from calc_poly_pwb_side_v39_13, push the right-ear side in to the area where skin is visible. -def calc_poly_pwb_side_v39_13_2(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.0, 0.01) * (pts[15] - p0) - p13 = pts[13] - rand(0.05, 0.05) * (pts[13] - pts[30]) - p12 = pts[12] - rand(0.05, 0.05) * (pts[12] - pts[30]) - - p36 = pts[36] + rand(0.12, 0.18) * (pts[36] - pts[30]) - p45 = pts[45] + rand(0.12, 0.18) * (pts[45] - pts[30]) - - p05 = pts[5] + rand(0.12, 0.18) * (pts[5] - pts[30]) - p05[0] = pts[5][0] + rand(0.18, 0.22) * (pts[5][0] - pts[30][0]) - - p04 = pts[4] + rand(0.12, 0.18) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.12, 0.18) * (pts[3] - pts[30]) - p01 = pts[1] + rand(0.12, 0.18) * (pts[1] - p0) - - p12[1] = y10 - p36[1] = y10 - p45[1] = y10 - - # pts = [p0, p15, p13, p45, p36, p05, p04, p03, p01] - pts = [p0, p15, p13, p12, p45, p36, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_pwb_front_v39_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # glabella (between the eyebrows) - p15, - p12, # right side from the viewer's perspective; p15 is at the top - p10, - p08, - p06, # bottommost point - p04, - p01, - ] # left side from the viewer's perspective; p01 at the top, p04 at the bottom - - return np.round(np.array(pts)).astype(np.int32) - - -# Mix of the calc_poly_pwb_front_v39_1 & 22 masks.
-def calc_poly_pwb_front_v39_n_22(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - # p15 = pts[15] - rand(0.02, 0.06)*(pts[15] - p0) - p15 = pts[15] - rand(0.01, 0.10) * (pts[15] - p0) - - p13 = pts[13] - rand(0.01, 0.10) * (pts[13] - pts[30]) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - p03 = pts[3] - rand(0.01, 0.05) * (pts[3] - pts[30]) - - pts = [ - p0, # 미간 - p15, - p13, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p03, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# calc_poly_pwb_front_v39_1 & 22 마스크를 섞음. -def calc_poly_pwb_side_v39_n_22(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - p04[0] = p04[0] - rand(0.01, 0.10) * (p04[0] - p0[0]) - p03 = pts[3] - rand(0.07, 0.07) * (pts[3] - pts[30]) - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p03, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# calc_poly_pwb_front_v39_1 & 22 마스크를 섞음. -# calc_poly_pwb_side_v39_n_22 보다 귀쪽을 더 바깥으로 판다. -def calc_poly_pwb_side_v39_n_22_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] + rand(0.01, 0.1) * (pts[1] - p0) - p04 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p04[0], p04[1] + (y10 - p01[1]) * 0.5] - p04[0] = p04[0] - rand(0.01, 0.10) * (p04[0] - p0[0]) - p03 = pts[3] - rand(0.07, 0.07) * (pts[3] - pts[30]) - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p03, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# calc_poly_pwb_front_v39_1 보다 좀 넓게. 
-def calc_poly_pwb_front_v39_1_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.01, 0.01) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] + rand(0.01, 0.01) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# 홍나실용 : 귀를 파고 아래쪽을 좀 올림. -def calc_poly_nasilhong_front_v1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = max(h * 0.8, (pts[8] + (pts[8] - p0) * rand(0.05, 0.2))[1]) # y80% - y11 = (pts[8] + (pts[8] - p0) * rand(0.1, 0.5))[1] # 턱 위치와 비례 - y6 = y10 - y8 = y10 - - # 정면은, 여기에 귀가 있어서 괜찮고, - # 측면은, 배경과 맡닿게 되는 면이어서 약간 더 과하게 튀어나오게 해준다. - # 그래야 볼과 배경이 나란하게 되어 프레임에 따라 번쩍거리게 되는 문제가 없다. - p15 = pts[15] + rand(-0.04, 0.2) * (pts[15] - p0) - # p15 = pts[15] - rand(0.02, 0.06)*(pts[15] - p0) - p14 = pts[14] + rand(-0.04, 0.1) * (pts[14] - p0) - p14[1] = ((pts[14] + pts[13]) / 2)[1] # pts 13,14 중간으로 높이를 맞춘다.(귀걸이 보이게 하기위해) - p13 = pts[13] + rand(0.00, 0.2) * (pts[13] - p0) - # p12 = [pts[12][0], p14[1] + (y11 - p14[1])*0.5] - p12 = [p13[0], p14[1] + (y11 - p14[1]) * 0.8] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - # 정/측면 모두 여기에 귀가 있어서 괜찮다. - p01 = pts[1] - rand(0.02, 0.06) * (pts[1] - p0) - p02 = pts[2] + rand(-0.06, 0.04) * (pts[2] - p0) - p02[1] = ((pts[2] + pts[3]) / 2)[1] # pts 2,3 중간으로 높이를 맞춘다.(귀걸이 보이게 하기위해) - p03 = pts[3] + rand(0.0, 0.1) * (pts[3] - p0) - p04 = [pts[4][0], p02[1] + (y11 - p02[1]) * 0.8] - - pts = [ - p0, # 미간 - p15, - p14, - p13, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p03, - p02, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른쪽에 약간 일자로 삐져나오게 함. -def calc_poly_pwb_front_v39_1_2(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(-0.06, 0.1) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른쪽에 많이 사선으로 삐져나오게 한다. 
-def calc_poly_pwb_front_v39_1_6(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - p12 = pts[15] + rand(-0.06, 0.3) * (pts[15] - p0) - p12 = [p12[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른쪽에 많이 사선으로 삐져나오게 한다. -# calc_poly_pwb_front_v39_1_6 보다는 좀 적게 사선으로 튀어나온다. -def calc_poly_pwb_front_v39_1_6_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - # p15 = pts[15] - rand(0.02, 0.02)*(pts[15] - p0) - p12 = pts[15] + rand(-0.06, 0.1) * (pts[15] - p0) - p12 = [p12[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른쪽에 많이 사선으로 삐져나오게 한다. -def calc_poly_pwb_front_v39_1_7(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(-0.06, 0.2) * (pts[15] - p0) - p12 = pts[15] + rand(-0.06, 0.3) * (pts[15] - p0) - p12 = [p12[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른쪽에 많이 사선으로 삐져나오게 한다. -# calc_poly_pwb_front_v39_1_7 과 위쪽은 같고, 박스가 작은 경우 턱이 나오는 경우가 있어서, 아래쪽 턱보다 길게 만든다. 
-def calc_poly_pwb_front_v39_1_7_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.96 - - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(-0.06, 0.2) * (pts[15] - p0) - p12 = pts[15] + rand(-0.06, 0.3) * (pts[15] - p0) - p12 = [p12[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른쪽에 많이 사선으로 삐져나오게 한다. -# calc_poly_pwb_front_v39_1_7_1 을 왼쪽을 최대값으로 변경한다. -def calc_poly_pwb_front_v39_1_7_2(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.96 - - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(-0.06, 0.2) * (pts[15] - p0) - p12 = pts[15] + rand(-0.06, 0.3) * (pts[15] - p0) - p12 = [p12[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.00, 0.00) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른쪽에 약간 일자로 삐져나오게, 그리고 일자가 아닌 약간 안쪽으로 떨어지게. -# H22 회색니트에서 테스트함. -def calc_poly_pwb_front_v39_1_8(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(-0.06, 0.2) * (pts[15] - p0) - p12 = pts[15] - rand(0.02, 0.2) * (pts[15] - p0) - p12 = [p12[0], p12[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른쪽에 약간 일자로 삐져나오게, 그리고 일자가 아닌 약간 안쪽으로 떨어지게. -# calc_poly_pwb_front_v39_1_8 보다 오른쪽으로 좀 더 나온다. 
-def calc_poly_pwb_front_v39_1_8_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(-0.06, 0.3) * (pts[15] - p0) - p12 = pts[15] - rand(0.02, 0.2) * (pts[15] - p0) - p12 = [p12[0], p12[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# 아랫쪽을 그냥 네모로. -def calc_poly_pwb_front_v39_1_9(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [p15[0], y10] - p08 = [h * 0.5, y8] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - p06 = [p01[0], y6] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 양 옆을 넓은 쪽을 택한다. -def calc_poly_pwb_front_v39_1_3(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.02) * (pts[15] - p0) - p12 = pts[15] - rand(0.02, 0.02) * (pts[15] - p0) - p12 = [p12[0], p12[1] + (y10 - p12[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.02, 0.02) * (pts[1] - p0) - p04 = pts[1] - rand(0.02, 0.02) * (pts[1] - p0) - p04 = [p04[0], p04[1] + (y10 - p04[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# snow : calc_poly_pwb_front_v39_1 에서 왼쪽 아래(p06, p04)을 좀 좁게 한다. 
-def calc_poly_pwb_front_v39_1_4(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.85 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.4, y6] - p06[0] = pts[6][0] - - # p01 = pts[1] - rand(0.01, 0.20)*(pts[1] - p0) - p01 = [pts[3][0], pts[1][1]] - rand(0.01, 0.0) * (pts[1] - p0) - p04 = [pts[4][0], max(p01[1] + (y10 - p01[1]) * 0.5, pts[9][1])] - p04 = p04 - rand(0.02, 0.1) * (np.array(p04) - p0) - # p04 = [pts[4][0], p01[1] + (y10-p01[1])*0.5] - # p04 = [(pts[4][0]+pts[5][0])//2, p01[1] + (y10-p01[1])*0.5] - # p04 = [p01[0], p01[1] + (y10-p01[1])*0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# pwb_front_v39_1 에 오른 쪽을 높고 넓게 한다. -def calc_poly_pwb_front_v39_1_5(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - # p0 = [pts[27][0], pts[27][1] + h*0.09] - p0 = [pts[27][0], pts[27][1] + h * 0.05] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = (pts[15] + pts[16]) / 2 + rand(0.02, 0.2) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_hantu_front_v39_0(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - # p0 = [pts[27][0], (pts[27][1] + 3*pts[28][1])/4] - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.06, 0.08) * (pts[15] - p0) - p12 = [ - (pts[15] - rand(0.02, 0.06) * (pts[15] - p0))[0], - p15[1] + (y10 - p15[1]) * 0.4, - ] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] - rand(0.08, 0.11) * (pts[1] - p0) - p04 = [ - (pts[1] - rand(0.02, 0.06) * (pts[1] - p0))[0], # x - p01[1] + (y10 - p01[1]) * 0.4, - ] # y - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - # print(pts) - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_devin_side_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] # [h*0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.3, y6] # [h*0.35, y6] - - p01 
= pts[1] - rand(0.01, 0.10) * ( - pts[1] - p0 - ) # pts[1] - rand(0.01, 0.10)*(pts[1] - p0) - p04 = [ - p01[0] * 0.85, - p01[1] + (y10 - p01[1]) * 0.5, - ] # [p01[0], p01[1] + (y10-p01[1])*0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_devin_side_keying_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - # 볼을 넘어서도록 옆으로 넓힌다. - p15 = pts[15] + 0.1 * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] # [h*0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.3, y6] # [h*0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * ( - pts[1] - p0 - ) # pts[1] - rand(0.01, 0.10)*(pts[1] - p0) - p04 = [ - p01[0] * 0.85, - p01[1] + (y10 - p01[1]) * 0.5, - ] # [p01[0], p01[1] + (y10-p01[1])*0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# 말하는 것과 상관없는 마스크 만들기 -# calc_poly_pwb_side_v39_13 와 유사한데, 마스크의 맨 아래위치가 턱이 나오지 않도록 조정한다. -def calc_poly_pwb_side_v39_14(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - jaw = (pts[8] + (pts[8] - pts[30]) * 0.5)[1] - - y10 = min(max(h * 0.8, jaw), h * 0.95) - # y10 = min(jaw, h*0.95) - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.20, 0.28) * (pts[15] - p0) - p13 = pts[13] + rand(0.20, 0.28) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.20, 0.28) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.20, 0.28) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.18, 0.22) * (pts[11][0] - pts[30][0]) - - p36 = pts[36] + rand(0.12, 0.18) * (pts[36] - pts[30]) - p45 = pts[45] + rand(0.12, 0.18) * (pts[45] - pts[30]) - - p05 = pts[5] + rand(0.12, 0.18) * (pts[5] - pts[30]) - p05[0] = pts[5][0] + rand(0.18, 0.22) * (pts[5][0] - pts[30][0]) - - p04 = pts[4] + rand(0.12, 0.18) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.12, 0.18) * (pts[3] - pts[30]) - p01 = pts[1] + rand(0.12, 0.18) * (pts[1] - p0) - - p36[1] = y10 - p45[1] = y10 - - # pts = [p0, p15, p13, p45, p36, p05, p04, p03, p01] - pts = [p0, p15, p13, p45, p36, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# ybm 누끼 전용 마스크 -def calc_poly_ybm_side_small(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.0, 0.1) * (pts[15] - p0) - p13 = pts[13] + rand(0.0, 0.1) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.0, 0.1) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.0, 0.1) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.0, 0.1) * (pts[11][0] - pts[30][0]) - - p10 = pts[10] + rand(0.1, 0.2) * (pts[10] - pts[30]) - p08 = pts[9] + rand(0.1, 0.1) * (pts[9] - pts[30]) - p06 = pts[8] + rand(0.1, 0.1) * (pts[8] - pts[30]) - p05 = pts[5] + rand(0.2, 0.2) * (pts[5] - pts[30]) - p05[0] = pts[5][0] + rand(0.0, 0.1) * (pts[5][0] - pts[30][0]) - - p04 = pts[4] + rand(0.0, 0.1) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.0, 0.1) * (pts[3] - pts[30]) - p01 
= pts[1] + rand(0.0, 0.1) * (pts[1] - p0) - - # p11[0] = pts[11][0] - # p10[1] = y10 - # p08[1] = y10 - # p06[1] = y10 - # p05[0] = pts[5][0] - - pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - # pts = [p0, p15, p13, p12, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -# ybm 누끼 전용 마스크 -def calc_poly_ybm_front_small(pts, h, randomness=True): - def rand(a, b): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - p0 = pts[27] - y10 = h * 0.80 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.0, 0.1) * (pts[15] - p0) - p13 = pts[13] + rand(0.0, 0.1) * (pts[13] - pts[30]) - p12 = pts[12] + rand(0.0, 0.1) * (pts[12] - pts[30]) - p11 = pts[11] + rand(0.0, 0.1) * (pts[11] - pts[30]) - p11[0] = pts[11][0] + rand(0.0, 0.1) * (pts[11][0] - pts[30][0]) - - p10 = pts[10] + rand(0.1, 0.2) * (pts[10] - pts[30]) - p08 = pts[9] + rand(0.1, 0.1) * (pts[9] - pts[30]) - p06 = pts[8] + rand(0.1, 0.1) * (pts[8] - pts[30]) - p05 = pts[5] + rand(0.2, 0.2) * (pts[5] - pts[30]) - p05[0] = pts[5][0] + rand(0.0, 0.1) * (pts[5][0] - pts[30][0]) - - p04 = pts[4] + rand(0.0, 0.1) * (pts[4] - pts[30]) - p03 = pts[3] + rand(0.0, 0.1) * (pts[3] - pts[30]) - p01 = pts[1] + rand(0.0, 0.1) * (pts[1] - p0) - - # p11[0] = pts[11][0] - # p10[1] = y10 - # p08[1] = y10 - # p06[1] = y10 - # p05[0] = pts[5][0] - - pts = [p0, p15, p13, p12, p11, p10, p08, p06, p05, p04, p03, p01] - # pts = [p0, p15, p13, p12, p10, p08, p06, p05, p04, p03, p01] - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_devin_side_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] - rand(0.02, 0.06) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] # [h*0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.3, y6] # [h*0.35, y6] - - p01 = pts[1] - rand(0.01, 0.10) * ( - pts[1] - p0 - ) # pts[1] - rand(0.01, 0.10)*(pts[1] - p0) - p04 = [ - p01[0] * 0.85, - p01[1] + (y10 - p01[1]) * 0.5, - ] # [p01[0], p01[1] + (y10-p01[1])*0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# 홍나실용 : 귀를 파고 아래쪽을 좀 올림. -def calc_poly_nasilhong_front_v1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = max(h * 0.8, (pts[8] + (pts[8] - p0) * rand(0.05, 0.2))[1]) # y80% - y11 = (pts[8] + (pts[8] - p0) * rand(0.05, 0.2))[1] # 턱 위치와 비례 - y6 = y10 - y8 = y10 - - # 정면은, 여기에 귀가 있어서 괜찮고, - # 측면은, 배경과 맡닿게 되는 면이어서 약간 더 과하게 튀어나오게 해준다. - # 그래야 볼과 배경이 나란하게 되어 프레임에 따라 번쩍거리게 되는 문제가 없다. 
- p15 = pts[15] + rand(-0.04, 0.2) * (pts[15] - p0) - # p15 = pts[15] - rand(0.02, 0.06)*(pts[15] - p0) - p14 = pts[14] + rand(-0.04, 0.1) * (pts[14] - p0) - p14[1] = ((pts[14] + pts[13]) / 2)[1] # pts 13,14 중간으로 높이를 맞춘다.(귀걸이 보이게 하기위해) - p13 = pts[13] + rand(0.00, 0.2) * (pts[13] - p0) - p12 = [pts[12][0], p14[1] + (y11 - p14[1]) * 0.5] - - def get_bottom_y(y1, y2, seed): - if randomness: - if seed > 0.5: - return y1 - else: - return y2 - else: - return y1 - - seed = random.random() - p11 = [pts[9][0], y11] - p10 = [pts[9][0], get_bottom_y(y10, y11, seed)] - p08 = [pts[8][0], get_bottom_y(y10, y11, seed)] - p06 = [pts[7][0], get_bottom_y(y10, y11, seed)] - p07 = [pts[7][0], y11] - - # 정/측면 모두 여기에 귀가 있어서 괜찮다. - p01 = pts[1] - rand(0.02, 0.06) * (pts[1] - p0) - p02 = pts[2] + rand(-0.06, 0.04) * (pts[2] - p0) - p02[1] = ((pts[2] + pts[3]) / 2)[1] # pts 2,3 중간으로 높이를 맞춘다.(귀걸이 보이게 하기위해) - p03 = pts[3] + rand(0.0, 0.1) * (pts[3] - p0) - p04 = [pts[4][0], p02[1] + (y11 - p02[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p14, - p13, - p12, # 우리가 보기에 오른쪽, p15가 위 - p11, - p10, - p08, - p06, - p07, # 맨 아래 - p04, - p03, - p02, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_taejina_front_v1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # y10 = (pts[8] + (pts[8] - pts[30]) * 0.2)[1] #(pts[8] + (pts[8] - pts[30]))[1] - # p0 = pts[27] - y10 = h * 0.8 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.05, 0.10) * (pts[15] - p0) - p12 = [pts[12][0] * 1.05, pts[12][1]] + rand(0.01, 0.10) * (pts[12] - pts[30]) - - # p10 = [h*0.65, y10] - # p08 = [h*0.5, y8] - # p06 = [h*0.35, y6] - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [pts[4][0] * 0.95, pts[4][1]] + rand(0.01, 0.10) * (pts[4] - pts[30]) - - # p10 = pts[10] + rand(0.1, 0.2)*(pts[10] - pts[30]) - p10 = [max(p12[0] * 0.8, pts[10][0]), max(y10, pts[10][1] * 1.2)] + rand( - 0.1, 0.2 - ) * (pts[10] - pts[30]) - p08 = [pts[9][0], max(y10, pts[9][1] * 1.2)] + rand(0.1, 0.1) * (pts[9] - pts[30]) - p06 = [min(p04[0] * 1.4, pts[8][0]), max(y10, pts[8][1] * 1.2)] + rand(0.1, 0.1) * ( - pts[8] - pts[30] - ) - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_taejina_side_v1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # y10 = (pts[8] + (pts[8] - pts[30]) * 0.5)[1] #(pts[8] + (pts[8] - pts[30]))[1] - # p0 = pts[27] - y10 = h * 0.8 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(0.15, 0.20) * (pts[15] - p0) - p12 = pts[12] + rand(0.25, 0.30) * (pts[12] - pts[30]) - - # p10 = [h*0.65, y10] - # p08 = [h*0.5, y8] - # p06 = [h*0.35, y6] - p01 = pts[1] - rand(0.01, 0.10) * (pts[1] - p0) - p04 = [pts[4][0] * 0.85, pts[4][1]] + rand(0.01, 0.10) * (pts[4] - pts[30]) - - # p10 = pts[10] + rand(0.1, 0.2)*(pts[10] - pts[30]) - p10 = [max(p12[0] * 0.8, pts[10][0]), max(y10, pts[10][1] * 1.3)] + rand( - 0.1, 0.2 - ) * (pts[10] - pts[30]) - p08 = [pts[9][0], max(y10, pts[9][1] * 1.3)] + rand(0.1, 0.1) * (pts[9] - pts[30]) - p06 = [min(p04[0] * 
1.4, pts[8][0]), max(y10, pts[8][1] * 1.3)] + rand(0.1, 0.1) * ( - pts[8] - pts[30] - ) - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -def calc_poly_pwb_front_v39_1_wide(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - p0 = [pts[27][0], pts[27][1] + h * 0.09] - # p0 = pts[27] - y10 = h * 0.9 - y6 = y10 - y8 = y10 - - p15 = pts[15] + rand(-0.02, 0.2) * (pts[15] - p0) - p12 = [p15[0], p15[1] + (y10 - p15[1]) * 0.5] - - p10 = [h * 0.65, y10] - p08 = [h * 0.5, y8] - p06 = [h * 0.35, y6] - - p01 = pts[1] + rand(-0.01, 0.10) * (pts[1] - p0) - p04 = [p01[0], p01[1] + (y10 - p01[1]) * 0.5] - - pts = [ - p0, # 미간 - p15, - p12, # 우리가 보기에 오른쪽, p15가 위 - p10, - p08, - p06, # 맨 아래 - p04, - p01, - ] # 우리가 보기에 왼쪽, p01 이 위, p04가 아래 - - return np.round(np.array(pts)).astype(np.int32) - - -# 네모 박스 마스크 -def calc_poly_box_1(pts, h, randomness=True): - def rand_(a, b, randomness=True): - if randomness: - r = random.random() - return a + (b - a) * r - else: - return (a + b) / 2 - - def rand(s, e): - return rand_(s, e, randomness) - - # import pdb - # pdb.set_trace() - p0 = pts[51] - pts_no_move = pts[[0, 1, 15, 16, 19, 24, 49, 53], :] - face_bottom = pts[27][1] + (max(pts[[27, 51], 1]) - min(pts[[27, 51], 1])) * 2.1 - - l, t, r, b = pts[1][0], pts[29][1], pts[15][0], face_bottom - r = r + (r - l) * 0.03 # 오른 쪽을 살짝 더 키운다. - b = max(b, max(pts[:, 1])) - - lt = [l, t] + rand(-0.1, 0.2) * ([l, t] - p0) - rt = [r, t] + rand(-0.1, 0.2) * ([r, t] - p0) - rb = [r, b] + rand(-0.1, 0.2) * ([r, b] - p0) - lb = [l, b] + rand(-0.1, 0.2) * ([l, b] - p0) - - pts = [lt, rt, rb, lb] - pts = [[min(max(x, 0), h), min(max(0, y), h)] for x, y in pts] - - return np.round(np.array(pts)).astype(np.int32) - - -calc_poly = { - 6: calc_poly_v6, - 7: calc_poly_v7, - 8: calc_poly_v8, - 9: calc_poly_v9, - 10: calc_poly_v10, - 11: calc_poly_v11, - 21: calc_poly_v21, - 22: calc_poly_v22, - 23: calc_poly_v23, - 24: calc_poly_v24, - "pwb_side_v39": calc_poly_pwb_side_v39, - "pwb_side_v39_1": calc_poly_pwb_side_v39_1, - "pwb_side_v39_11": calc_poly_pwb_side_v39_11, - "pwb_side_v39_12": calc_poly_pwb_side_v39_12, - "pwb_side_v39_13": calc_poly_pwb_side_v39_13, - "pwb_side_v39_13_1": calc_poly_pwb_side_v39_13_1, - "pwb_side_v39_13_2": calc_poly_pwb_side_v39_13_2, - "pwb_side_v39_14": calc_poly_pwb_side_v39_14, - "ybm_side_small": calc_poly_ybm_side_small, - "ybm_front_small": calc_poly_ybm_front_small, - "pwb_front_v39_0": calc_poly_pwb_front_v39_0, - "pwb_front_v39_1": calc_poly_pwb_front_v39_1, - "pwb_front_v39_1_1": calc_poly_pwb_front_v39_1_1, - "pwb_front_v39_1_wide": calc_poly_pwb_front_v39_1_wide, - "pwb_front_v39_1_2": calc_poly_pwb_front_v39_1_2, - "pwb_front_v39_1_3": calc_poly_pwb_front_v39_1_3, - "pwb_front_v39_1_4": calc_poly_pwb_front_v39_1_4, - "pwb_front_v39_1_5": calc_poly_pwb_front_v39_1_5, - "pwb_front_v39_1_6": calc_poly_pwb_front_v39_1_6, - "pwb_front_v39_1_6_1": calc_poly_pwb_front_v39_1_6_1, - "pwb_front_v39_1_7": calc_poly_pwb_front_v39_1_7, - "pwb_front_v39_1_7_1": calc_poly_pwb_front_v39_1_7_1, - "pwb_front_v39_1_7_2": calc_poly_pwb_front_v39_1_7_2, - "pwb_front_v39_1_8": calc_poly_pwb_front_v39_1_8, - "pwb_front_v39_1_8_1": calc_poly_pwb_front_v39_1_8_1, - "pwb_front_v39_1_9": calc_poly_pwb_front_v39_1_9, - 
"hantu_front_v39_0": calc_poly_hantu_front_v39_0, - "hunet_side_1": calc_poly_hunet_side_1, - "hantu_side_1": calc_poly_hantu_side_1, - "class101_side_1": calc_poly_class101_side_1, - "devin_side_1": calc_poly_devin_side_1, - "devin_side_keying_1": calc_poly_devin_side_keying_1, - "nasilhong_front_v1": calc_poly_nasilhong_front_v1, - "pwb_front_v39_n_22": calc_poly_pwb_front_v39_n_22, - "pwb_side_v39_n_22": calc_poly_pwb_side_v39_n_22, - "pwb_side_v39_n_22_1": calc_poly_pwb_side_v39_n_22_1, - "taejina_front_v1": calc_poly_taejina_front_v1, - "taejina_side_v1": calc_poly_taejina_side_v1, - "box_1": calc_poly_box_1, -} diff --git a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/transform_history.py b/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/transform_history.py deleted file mode 100644 index 9e2b5953a0a77a53715a8975216ab569e20df9da..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/s2f_dir/src/transform_history.py +++ /dev/null @@ -1,56 +0,0 @@ -import random - -import cv2 -import numpy as np -import torch -import torchvision.transforms as T -from PIL import Image - - -def manual_seed(rng): - if rng: - torch.manual_seed(rng) - random.seed(rng) - - -# ------------------------------------------------------------------------------ -mask_img_trsf_ver_00_jitter = T.ColorJitter( - brightness=0.3, contrast=0.2, saturation=0.4, hue=0.0 -) - - -def mask_img_trsf_ver_00(im, rng=None): - manual_seed(rng) - - if random.randint(0, 1) == 0: - im = cv2.GaussianBlur( - im, (random.randint(0, 1) * 2 + 1, random.randint(0, 1) * 2 + 1), 0, 0 - ) - - if random.randint(0, 1) == 0: - im = Image.fromarray(im) - im = mask_img_trsf_ver_00_jitter(im) - im = np.array(im) - return im - - -# ------------------------------------------------------------------------------ -mask_img_trsf_ver_01_jitter = T.ColorJitter( - brightness=0.15, contrast=0.1, saturation=0.2, hue=0.0 -) - - -def mask_img_trsf_ver_01(im, rng=None): - manual_seed(rng) - - # if random.randint(0, 1) == 0: - # im = cv2.GaussianBlur(im, (random.randint(0,1)*2+1,random.randint(0,1)*2+1), 0, 0) - - if random.randint(0, 1) == 0: - im = Image.fromarray(im) - im = mask_img_trsf_ver_01_jitter(im) - im = np.array(im) - return im - - -mask_img_trsfs = {0: mask_img_trsf_ver_00, 1: mask_img_trsf_ver_01} diff --git a/stf/stf-api-alternative/src/stf_alternative/template.py b/stf/stf-api-alternative/src/stf_alternative/template.py deleted file mode 100644 index 2b12642423ccc801e0e363e7d1c779c594273265..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/template.py +++ /dev/null @@ -1,419 +0,0 @@ -import asyncio -import json -from pathlib import Path - -import asyncstdlib -import numpy as np -import pandas as pd -from pydub import AudioSegment - -from stf_alternative.compose import get_compose_func_without_keying, get_keying_func -from stf_alternative.dataset import LipGanAudio, LipGanImage, LipGanRemoteImage -from stf_alternative.inference import ( - adictzip, - ainference_model_remote, - audio_encode, - dictzip, - get_head_box, - inference_model, - inference_model_remote, -) -from stf_alternative.preprocess_dir.utils import face_finder as ff -from stf_alternative.readers import ( - AsyncProcessPoolBatchIterator, - ProcessPoolBatchIterator, - get_image_folder_async_process_reader, - get_image_folder_process_reader, -) -from stf_alternative.util import ( - acycle, - get_crop_mp4_dir, - get_frame_dir, - get_preprocess_dir, - icycle, - read_config, -) - - -def 
calc_audio_std(audio_segment): - sample = np.array(audio_segment.get_array_of_samples(), dtype=np.int16) - max_value = np.iinfo( - np.int8 - if audio_segment.sample_width == 1 - else np.int16 - if audio_segment.sample_width == 2 - else np.int32 - ).max - return sample.std() / max_value, len(sample) - - -class RunningAudioNormalizer: - def __init__(self, ref_audio_segment, decay_rate=0.01): - self.ref_std, _ = calc_audio_std(ref_audio_segment) - self.running_var = np.float64(0) - self.running_cnt = 0 - self.decay_rate = decay_rate - - def __call__(self, audio_segment): - std, cnt = calc_audio_std(audio_segment) - self.running_var = (self.running_var + (std**2) * cnt) * (1 - self.decay_rate) - self.running_cnt = (self.running_cnt + cnt) * (1 - self.decay_rate) - - return audio_segment._spawn( - (audio_segment.get_array_of_samples() / self.std * self.ref_std) - .astype(np.int16) - .tobytes() - ) - - @property - def std(self): - return np.sqrt(self.running_var / self.running_cnt) - - -def get_video_metadata(preprocess_dir): - json_path = preprocess_dir / "metadata.json" - with open(json_path, "r") as f: - return json.load(f) - - -class Template: - def __init__( - self, - config_path, - model, - template_video_path, - wav_std=False, - ref_wav=None, - verbose=False, - ): - self.config = read_config(config_path) - self.model = model - - self.template_video_path = Path(template_video_path) - self.preprocess_dir = Path( - get_preprocess_dir(model.work_root_path, model.args.name) - ) - - self.crop_mp4_dir = Path( - get_crop_mp4_dir(self.preprocess_dir, template_video_path) - ) - self.dataset_dir = self.crop_mp4_dir / f"{Path(template_video_path).stem}_000" - - self.template_frames_path = Path( - get_frame_dir(self.preprocess_dir, template_video_path, ratio=1.0) - ) - self.verbose = verbose - self.remote = self.model.args.model_type == "remote" - - self.audio_normalizer = ( - RunningAudioNormalizer(ref_wav) if wav_std else lambda x: x - ) - self.df = pd.read_pickle(self.dataset_dir / "df_fan.pickle") - - metadata = get_video_metadata(self.preprocess_dir) - self.fps = metadata["fps"] - self.width, self.height = metadata["width"], metadata["height"] - - self.keying_func = get_keying_func(self) - self.compose_func = get_compose_func_without_keying(self, ratio=1.0) - - self.move = "move" in self.config.keys() and self.config.move - - self.inference_func = inference_model_remote if self.remote else inference_model - self.batch_size = self.model.args.batch_size - self.unit = 1000 / self.fps - - def _get_reader(self, num_skip_frames): - assert self.template_frames_path.exists() - return get_image_folder_process_reader( - data_path=self.template_frames_path, - num_skip_frames=num_skip_frames, - preload=self.batch_size, - ) - - def _get_local_face_dataset(self, num_skip_frames): - return LipGanImage( - args=self.model.args, - path=self.dataset_dir, - num_skip_frames=num_skip_frames, - ) - - def _get_remote_face_dataset(self, num_skip_frames): - return LipGanRemoteImage( - args=self.model.args, - path=self.dataset_dir, - num_skip_frames=num_skip_frames, - ) - - def _get_mel_dataset(self, audio_segment): - image_count = round( - audio_segment.duration_seconds * self.fps - ) # 패딩 했기 때문에 batch_size로 나뉜다 - ids = list(range(image_count)) - - mel = audio_encode( - model=self.model, - audio_segment=audio_segment, - device=self.model.device, - ) - - return LipGanAudio( - args=self.model.args, - id_list=ids, - mel=mel, - fps=self.fps, - ) - - def _get_face_dataset(self, num_skip_frames): - if self.remote: - return 
self._get_remote_face_dataset(num_skip_frames=num_skip_frames) - else: - return self._get_local_face_dataset(num_skip_frames=num_skip_frames) - - def _wrap_reader(self, reader): - reader = icycle(reader) - return reader - - def _wrap_dataset(self, dataset): - dataloader = ProcessPoolBatchIterator( - dataset=dataset, - batch_size=self.batch_size, - ) - return dataloader - - def get_reader(self, num_skip_frames=0): - reader = self._get_reader(num_skip_frames=num_skip_frames) - reader = self._wrap_reader(reader) - return reader - - def get_mel_loader(self, audio_segment): - mel_dataset = self._get_mel_dataset(audio_segment) - return self._wrap_dataset(mel_dataset) - - def get_face_loader(self, num_skip_frames=0): - face_dataset = self._get_face_dataset(num_skip_frames=num_skip_frames) - return self._wrap_dataset(face_dataset) # need cycle - - # padding according to batch size. - def pad(self, audio_segment): - num_frames = audio_segment.duration_seconds * self.fps - pad = AudioSegment.silent( - (self.batch_size - (num_frames % self.batch_size)) * (1000 / self.fps) - ) - return audio_segment + pad - - def _prepare_data( - self, - audio_segment, - video_start_offset_frame, - ): - video_start_offset_frame = video_start_offset_frame % len(self.df) - padded = self.pad(audio_segment) - - face_dataset = self._get_face_dataset(num_skip_frames=video_start_offset_frame) - mel_dataset = self._get_mel_dataset(audio_segment=padded) - - n_frames = len(mel_dataset) - assert n_frames % self.batch_size == 0 - - face_loader = self._wrap_dataset(face_dataset) - mel_loader = self._wrap_dataset(mel_dataset) - return padded, face_loader, mel_loader - - def gen_infer( - self, - audio_segment, - video_start_offset_frame, - ): - padded, face_loader, mel_loader = self._prepare_data( - audio_segment=audio_segment, - video_start_offset_frame=video_start_offset_frame, - ) - - for i, v in enumerate(dictzip(iter(mel_loader), iter(face_loader))): - inferred = self.inference_func(self.model, v, self.model.device) - - for j, it in enumerate(inferred): - chunk_pivot = i * self.unit * self.batch_size + j * self.unit - chunk = padded[chunk_pivot : chunk_pivot + self.unit] - yield it, chunk - - def gen_infer_batch( - self, - audio_segment, - video_start_offset_frame, - ): - padded, face_loader, mel_loader = self._prepare_data( - audio_segment=audio_segment, - video_start_offset_frame=video_start_offset_frame, - ) - - for i, v in enumerate(dictzip(iter(mel_loader), iter(face_loader))): - inferred = self.inference_func(self.model, v, self.model.device) - yield inferred, padded[ - i * self.unit * self.batch_size : (i + 1) * self.unit * self.batch_size - ] - - def gen_infer_batch_future( - self, - pool, - audio_segment, - video_start_offset_frame, - ): - padded, face_loader, mel_loader = self._prepare_data( - audio_segment=audio_segment, - video_start_offset_frame=video_start_offset_frame, - ) - - futures = [] - for i, v in enumerate(dictzip(iter(mel_loader), iter(face_loader))): - futures.append( - pool.submit(self.inference_func, self.model, v, self.model.device) - ) - - for i, future in enumerate(futures): - yield future, padded[ - i * self.unit * self.batch_size : (i + 1) * self.unit * self.batch_size - ] - - def gen_infer_concurrent( - self, - pool, - audio_segment, - video_start_offset_frame, - ): - for future, chunk in self.gen_infer_batch_future( - pool, audio_segment, video_start_offset_frame - ): - for i, inferred in enumerate(future.result()): - yield inferred, chunk[i * self.unit : (i + 1) * self.unit] - - def compose( - 
self, - idx, - frame, - output, - ): - head_box_idx = idx % len(self.df) - head_box = get_head_box( - self.df, - move=self.move, - head_box_idx=head_box_idx, - ) - alpha2 = self.keying_func(output, head_box_idx, head_box) - frame = self.compose_func(alpha2, frame[:, :, :4], head_box_idx) - return frame - - def gen_frames( - self, - audio_segment, - video_start_offset_frame, - reader=None, - ): - reader = reader or self.get_reader(num_skip_frames=video_start_offset_frame) - gen_infer = self.gen_infer(audio_segment, video_start_offset_frame) - - for idx, ((o, a), f) in enumerate( - zip(gen_infer, reader), video_start_offset_frame - ): - composed = self.compose(idx, f, o) - yield composed, a - - def gen_frames_concurrent( - self, - pool, - audio_segment, - video_start_offset_frame, - reader=None, - ): - reader = reader or self.get_reader(num_skip_frames=video_start_offset_frame) - gen_infer = self.gen_infer_concurrent( - pool, - audio_segment, - video_start_offset_frame, - ) - - for idx, ((o, a), f) in enumerate( - zip(gen_infer, reader), video_start_offset_frame - ): - yield self.compose(idx, f, o), a - - -class AsyncTemplate(Template): - async def agen_infer_batch_future( - self, - pool, - audio_segment, - video_start_offset_frame, - ): - assert self.remote - - padded, face_loader, mel_loader = await self._aprepare_data( - pool, - audio_segment=audio_segment, - video_start_offset_frame=video_start_offset_frame, - ) - - futures = [] - async for i, v in asyncstdlib.enumerate( - adictzip(aiter(mel_loader), aiter(face_loader)) - ): - futures.append( - asyncio.create_task( - ainference_model_remote(pool, self.model, v, self.model.device) - ) - ) - - for i, future in enumerate(futures): - yield future, padded[ - i * self.unit * self.batch_size : (i + 1) * self.unit * self.batch_size - ] - - async def _awrap_dataset(self, dataset): - dataloader = AsyncProcessPoolBatchIterator( - dataset=dataset, - batch_size=self.batch_size, - ) - return dataloader - - async def _aprepare_data( - self, - pool, - audio_segment, - video_start_offset_frame, - ): - video_start_offset_frame = video_start_offset_frame % len(self.df) - padded = self.pad(audio_segment) - - loop = asyncio.get_running_loop() - - face_dataset, mel_dataset = await asyncio.gather( - loop.run_in_executor( - pool, self._get_face_dataset, video_start_offset_frame - ), - loop.run_in_executor(pool, self._get_mel_dataset, padded), - ) - - n_frames = len(mel_dataset) - assert n_frames % self.batch_size == 0 - - face_loader = await self._awrap_dataset(face_dataset) - mel_loader = await self._awrap_dataset(mel_dataset) - return padded, face_loader, mel_loader - - def _aget_reader(self, num_skip_frames): - assert self.template_frames_path.exists() - return get_image_folder_async_process_reader( - data_path=self.template_frames_path, - num_skip_frames=num_skip_frames, - preload=self.batch_size, - ) - - def _awrap_reader(self, reader): - reader = acycle(reader) - return reader - - def aget_reader(self, num_skip_frames=0): - reader = self._aget_reader(num_skip_frames=num_skip_frames) - reader = self._awrap_reader(reader) - return reader diff --git a/stf/stf-api-alternative/src/stf_alternative/util.py b/stf/stf-api-alternative/src/stf_alternative/util.py deleted file mode 100644 index e1cd2ea57885324d07a2f282b46724e714ce649f..0000000000000000000000000000000000000000 --- a/stf/stf-api-alternative/src/stf_alternative/util.py +++ /dev/null @@ -1,276 +0,0 @@ -import json -import random -import string -from datetime import datetime -from pathlib import Path - 
-import ffmpeg -import imageio_ffmpeg -import numpy as np -import torch -from addict import Dict - - -def icycle(iterable): - while True: - for it in iterable: - yield it - - -async def acycle(aiterable): - while True: - async for it in aiterable: - yield it - - -def read_config(config_path): - try: - with open(config_path) as fd: - conf = json.load(fd) - conf = Dict(conf) - except Exception as e: - print("read config exception in ", config_path) - raise e - return conf - - -def get_preprocess_dir(work_root_path, name): - return str(Path(work_root_path) / "preprocess" / name) - - -def get_crop_mp4_dir(preprocess_dir, video_path): - return f"{preprocess_dir}/crop_video_{Path(video_path).stem}" - - -def get_frame_dir(preprocess_dir, video_path, ratio): - ratio_s = "" if ratio == 1.0 else f"_{ratio}" - return f"{preprocess_dir}/{Path(video_path).stem}/frames{ratio_s}" - - -def get_template_ratio_file_path(preprocess_dir, video_path, ratio): - if ratio == 1.0: - return video_path - - root_path = f"{preprocess_dir}/{Path(video_path).name}" - return f"{root_path}/{Path(video_path).name}_ratio_{ratio}{Path(video_path).suffix}" - - -class _CallBack(object): - def __init__(self, callback, min_per, max_per, desc, verbose=False): - assert max_per > min_per - self.callback = callback - self.min_per = min_per - self.max_per = max_per - if isinstance(callback, _CallBack): - self.desc = callback.desc + "/" + desc - else: - self.desc = desc - self.last_per = -1 - self.verbose = verbose - self.callback_interval = 1 - - def __call__(self, per): - if self.callback is None: - return - my_per = self.min_per + (per + 1) / 100.0 * (self.max_per - self.min_per) - my_per = int(my_per) - if my_per - self.last_per >= self.callback_interval: - # if self.verbose: - # print(self.desc, ' : ', my_per) - self.callback(my_per) - self.last_per = my_per - - -def callback_inter(callback, min_per=0, max_per=100, desc="", verbose=False): - assert min_per >= 0 and max_per >= 0 and max_per > min_per - return _CallBack(callback, min_per, max_per, desc, verbose=verbose) - - -def callback_test(): - def callback(per): - print("real callback", per) - - callback1 = callback_inter(callback, min_per=0, max_per=50, desc="1") - callback2 = callback_inter(callback, min_per=50, max_per=90, desc="2") - callback3 = callback_inter(callback, min_per=90, max_per=100, desc="3") - # for i in range(0,101,10): - # callback1(i) - - callback11 = callback_inter(callback1, min_per=0, max_per=20, desc="a") - callback12 = callback_inter(callback1, min_per=20, max_per=80, desc="b") - callback13 = callback_inter(callback1, min_per=80, max_per=100, desc="c") - - for i in range(0, 101, 1): - callback11(i) - for i in range(0, 101, 1): - callback12(i) - for i in range(0, 101, 1): - callback13(i) - - for i in range(0, 101, 1): - callback2(i) - for i in range(0, 101, 1): - callback3(i) - - -def fix_seed(random_seed): - """ - fix seed to control any randomness from a code - (enable stability of the experiments' results.) - """ - torch.manual_seed(random_seed) - torch.cuda.manual_seed(random_seed) - torch.cuda.manual_seed_all(random_seed) # if use multi-GPU - torch.backends.cudnn.deterministic = True - torch.backends.cudnn.benchmark = False - np.random.seed(random_seed) - random.seed(random_seed) - - -def seed_worker(worker_id): - worker_seed = torch.initial_seed() % 2**32 - np.random.seed(worker_seed) - random.seed(worker_seed) - - -def get_three_channel_ffmpeg_reader(path): - reader = imageio_ffmpeg.read_frames(path) - meta = reader.__next__() # meta data, e.g. 
meta["size"] -> (width, height) - return reader, meta - - -def get_four_channel_ffmpeg_reader(path): - if path.endswith(".mov"): - reader = imageio_ffmpeg.read_frames( - str(path), pix_fmt="rgba", bits_per_pixel=32 - ) - elif path.endswith(".webm"): - stream_meta = [ - it - for it in ffmpeg.probe(str(path))["streams"] - if it["codec_type"] == "video" - ][0] - reader = imageio_ffmpeg.read_frames( - path=str(path), - pix_fmt="rgba", - input_params=["-c:v", "libvpx-vp9"] - if stream_meta["codec_name"] == "vp9" - else ["-c:v", "libvpx"], - bits_per_pixel=32, - ) - - meta = reader.__next__() # meta data, e.g. meta["size"] -> (width, height) - return reader, meta - - -def get_three_channel_ffmpeg_writer(out_path, size, fps, ffmpeg_params, wav_path): - writer = imageio_ffmpeg.write_frames( - out_path, - size=size, - fps=fps, - ffmpeg_log_level="error", - quality=10, # 0~10 - output_params=ffmpeg_params, - audio_path=wav_path, - macro_block_size=1, - ) - return writer - - -def get_webm_ffmpeg_writer(out_path, size, fps, wav_path, low_quality=False): - writer = imageio_ffmpeg.write_frames( - out_path, - size=size, - fps=fps / 2 if low_quality else fps, - ffmpeg_log_level="error", - quality=10, # 0~10 - # hojin - pix_fmt_in="rgba", - pix_fmt_out="yuva420p", - codec="libvpx", - bitrate="10M", - output_params=["-crf", "4", "-auto-alt-ref", "0"] - + (["-deadline", "realtime"] if low_quality else []), - # output_params=['-b','37800k', '-vf', 'hflip'], # 좌우 반전 테스트 (완료) - # hojin end - audio_path=wav_path, - macro_block_size=1, - ) - return writer - - -def get_mov_ffmpeg_writer(out_path, size, fps, wav_path): - writer = imageio_ffmpeg.write_frames( - out_path, - size=size, - fps=fps, - ffmpeg_log_level="error", - quality=10, # 0~10 - pix_fmt_in="rgba", - pix_fmt_out="yuva444p10le", - # codec="prores_ks", - output_params=[ - "-c:v", - "prores_ks", - "-profile:v", - "4", - "-vendor", - "apl0", - "-bits_per_mb", - "8000", - ], - audio_path=wav_path, - macro_block_size=1, - ) - return writer - - -def get_reader(template_video_path): - # document : https://github.com/imageio/imageio-ffmpeg - if template_video_path.endswith(".mp4"): - reader, meta = get_three_channel_ffmpeg_reader(template_video_path) - elif template_video_path.endswith(".mov") or template_video_path.endswith(".webm"): - reader, meta = get_four_channel_ffmpeg_reader(template_video_path) - else: - assert False - return reader, meta - - -def get_writer(out_path, size, fps, wav_path, slow_write): - if out_path.endswith(".mp4"): - # 합성하면서 비디오 생성 - ffmpeg_params = None - if slow_write: - # ffmpeg_params=['-acodec', 'aac', '-preset', 'veryslow', '-crf', '17'] - ffmpeg_params = ["-acodec", "aac", "-crf", "17"] - writer = get_three_channel_ffmpeg_writer( - out_path, size, fps, ffmpeg_params, wav_path - ) - elif out_path.endswith(".mov"): - writer = get_mov_ffmpeg_writer(out_path, size, fps, wav_path) - elif out_path.endswith(".webm"): - writer = get_webm_ffmpeg_writer( - out_path, size, fps, wav_path - ) # webm fps 변경한다.(속도를 위해) - else: - print('out_path should one of ["mp4", "webm"]') - assert False - return writer - - -def pretty_string_dict(d, tab=4): - s = ["{\n"] - for k, v in d.items(): - if isinstance(v, dict): - v = pretty_string_dict(v, tab + 1) - else: - v = repr(v) - - s.append("%s%r: %s,\n" % (" " * tab, k, v)) - s.append("%s}" % (" " * tab)) - return "".join(s) - - -def get_random_string_with_len(size: int): - time_str = datetime.now().strftime("%y%m%d_%H%M%S_") - return "".join([time_str] + random.choices(string.ascii_letters, k=size)) 
diff --git a/stf/stf-api-tools/.gitignore b/stf/stf-api-tools/.gitignore deleted file mode 100644 index 68bc17f9ff2104a9d7b6777058bb4c343ca72609..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/.gitignore +++ /dev/null @@ -1,160 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/#use-with-ide -.pdm.toml - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
-#.idea/ diff --git a/stf/stf-api-tools/.ipynb_checkpoints/README-checkpoint.md b/stf/stf-api-tools/.ipynb_checkpoints/README-checkpoint.md deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/stf/stf-api-tools/.ipynb_checkpoints/pyproject-checkpoint.toml b/stf/stf-api-tools/.ipynb_checkpoints/pyproject-checkpoint.toml deleted file mode 100644 index b5cd4ccd5977ebc151e17183577df36be72327a7..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/.ipynb_checkpoints/pyproject-checkpoint.toml +++ /dev/null @@ -1,14 +0,0 @@ -[tool.poetry] -name = "stf-tools" -version = "0.1.0" -description = "stf-alternative tools" -authors = ["Kim Minjong "] -readme = "README.md" -packages = [ - {include = "stf_tools", from="src"} -] - - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/stf/stf-api-tools/README.md b/stf/stf-api-tools/README.md deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/stf/stf-api-tools/poetry.lock b/stf/stf-api-tools/poetry.lock deleted file mode 100644 index e6e2be38c26b43b4361ab4ad0e99e219a5b17f58..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/poetry.lock +++ /dev/null @@ -1,7 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. -package = [] - -[metadata] -lock-version = "2.0" -python-versions = "*" -content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8" diff --git a/stf/stf-api-tools/pyproject.toml b/stf/stf-api-tools/pyproject.toml deleted file mode 100644 index b5cd4ccd5977ebc151e17183577df36be72327a7..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/pyproject.toml +++ /dev/null @@ -1,14 +0,0 @@ -[tool.poetry] -name = "stf-tools" -version = "0.1.0" -description = "stf-alternative tools" -authors = ["Kim Minjong "] -readme = "README.md" -packages = [ - {include = "stf_tools", from="src"} -] - - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/stf/stf-api-tools/src/stf_tools/.ipynb_checkpoints/__init__-checkpoint.py b/stf/stf-api-tools/src/stf_tools/.ipynb_checkpoints/__init__-checkpoint.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/stf/stf-api-tools/src/stf_tools/.ipynb_checkpoints/build_template-checkpoint.py b/stf/stf-api-tools/src/stf_tools/.ipynb_checkpoints/build_template-checkpoint.py deleted file mode 100644 index 523066d4775c749a3b324d807e4fcd99aaf5d820..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/.ipynb_checkpoints/build_template-checkpoint.py +++ /dev/null @@ -1,200 +0,0 @@ -import pathlib -import subprocess -import tempfile - -import av -import numpy as np -from PIL import Image - - -def alpha_crop_detect(path): - result = subprocess.check_output( - [ - "bash", - "-c", - f"""ffmpeg -c:v libvpx -i {path} -filter_complex "[0:v]alphaextract, cropdetect=limit=0:round=16:reset=0" -f null - 2>&1 | grep -oP 'crop=\K\d+:\d+:\d+:\d+' """, - ] - ) - return result.decode().strip().split("\n")[-1] - - -def crop_resize_overlay( - path, background_path, range, out, left=0.5, top=0.15, height=0.85, crf=17 -): - with av.open(path, "r") as f: - fps = f.streams.video[0].base_rate - - with av.open(background_path, "r") as f: - background_width, background_height = ( - f.streams.video[0].width, - 
f.streams.video[0].height, - ) - - if isinstance(top, float): - top = int(background_height * top) - - if isinstance(height, float): - height = int(background_height * height) - - height -= height % 2 - - w, h, _, _ = map(int, range.split(":")) - width = int(height / h * w) - width -= width % 2 - - if isinstance(left, float): - left = int(background_width * left) - width // 2 - - subprocess.call( - [ - "bash", - "-c", - f"""ffmpeg -y -c:v libvpx -r {fps} -i {path} -r {fps} -i {background_path} -filter_complex "[0:v]crop={range},scale={width}:{height} [vidi]; [1:v][vidi] overlay={left}:{top}" -crf {crf} -pix_fmt yuva420p -c:v libvpx-vp9 -c:a copy {out}""", - ] - ) - - return background_width, background_height, int(fps), (left, top, height) - - -import json -import os -import shutil -import tempfile -from pathlib import Path - -import av -import pandas as pd -import stf_alternative -from stf_alternative.util import get_crop_mp4_dir, get_frame_dir, get_preprocess_dir - -from stf_tools.silent import create_silent_video -from stf_tools.writers import WebmWriter - - -def create_template( - template_video_path, - background_path, - out_path, - config_path, - reference_face, - work_root_path, - checkpoint_path, - left, - top, - height, - crf=17, -): - crop_range = alpha_crop_detect(template_video_path) - result_width, result_height, fps, (left, top, height) = crop_resize_overlay( - template_video_path, - background_path, - crop_range, - out_path, - left=left, - top=top, - height=height, - crf=crf, - ) - - stf_alternative.preprocess_template( - config_path=config_path, - template_video_path=template_video_path, - reference_face=reference_face, - work_root_path=work_root_path, - template_frame_ratio=1.0, - template_video_ratio=[1.0], - silent_video_path=None, - callback=None, - device="cuda:0", - verbose=True, - save_frames=False, - ) - - model = stf_alternative.create_model( - config_path=config_path, - checkpoint_path=checkpoint_path, - work_root_path=work_root_path, - device="cuda:0", - verbose=True, - wavlm_path="microsoft/wavlm-large", - ) - - preprocess_dir = Path(get_preprocess_dir(work_root_path, model.args.name)) - crop_mp4_dir = Path(get_crop_mp4_dir(preprocess_dir, template_video_path)) - dataset_dir = crop_mp4_dir / f"{Path(template_video_path).stem}_000" - template_frames_path = Path( - get_frame_dir(preprocess_dir, template_video_path, ratio=1.0) - ) - - with open(preprocess_dir / "metadata.json", "w") as f: - json.dump( - { - "fps": fps, - "width": result_width, - "height": result_height, - }, - f, - ) - - df = pd.read_pickle(dataset_dir / "df_fan.pickle") - - w, h, x, y = map(int, crop_range.split(":")) - scale = height / h - - id_set = set() - for it in df["cropped_box"]: - if id(it) in id_set: - continue - id_set.add(id(it)) - x1, y1, x2, y2 = it - x1 = (x1 - x) * scale + left - x2 = (x2 - x) * scale + left - y1 = (y1 - y) * scale + top - y2 = (y2 - y) * scale + top - it[:] = (x1, y1, x2, y2) - - df.to_pickle(dataset_dir / "df_fan.pickle") - - template_frames_path.mkdir(exist_ok=True, parents=True) - with av.open(out_path) as container: - for frame in container.decode(video=0): - Image.fromarray(frame.to_ndarray(format="rgb24"), mode="RGB").save( - f"{template_frames_path}/%05d.webp" % frame.index, - format="webp", - lossless=True, - ) - - with tempfile.TemporaryDirectory() as tempdir: - silent_video_path = f"{tempdir}/silent.webm" - template = stf_alternative.Template( - config_path=config_path, - model=model, - template_video_path=template_video_path, - wav_std=False, - 
ref_wav=None, - verbose=True, - ) - writer = WebmWriter( - silent_video_path, - width=result_width, - height=result_height, - fps=fps, - crf=crf, - audio_sample_rate=16000, - quiet=False, - ) - create_silent_video(template, writer) - - silent_frames_path = Path( - get_frame_dir(preprocess_dir, silent_video_path, ratio=1.0) - ) - silent_frames_path.mkdir(exist_ok=True, parents=True) - with av.open(silent_video_path) as container: - for frame in container.decode(video=0): - Image.fromarray(frame.to_ndarray(format="rgb24"), mode="RGB").save( - f"{silent_frames_path}/%05d.webp" % frame.index, - format="webp", - lossless=True, - ) - shutil.rmtree(template_frames_path, ignore_errors=False) - silent_frames_path.rename(template_frames_path) diff --git a/stf/stf-api-tools/src/stf_tools/.ipynb_checkpoints/silent-checkpoint.py b/stf/stf-api-tools/src/stf_tools/.ipynb_checkpoints/silent-checkpoint.py deleted file mode 100644 index db1255841b5be8ef9b251047b34861db75df5d19..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/.ipynb_checkpoints/silent-checkpoint.py +++ /dev/null @@ -1,28 +0,0 @@ -from concurrent.futures import ThreadPoolExecutor - -from pydub import AudioSegment - - -def create_silent_video(template, writer): - reader = iter(template._get_reader(num_skip_frames=0)) - audio_segment = AudioSegment.silent(10000) - pivot = 0 - - with ThreadPoolExecutor(4) as p: - try: - while True: - gen_infer = template.gen_infer_concurrent( - p, - audio_segment, - pivot, - ) - for idx, (it, chunk) in enumerate(gen_infer, pivot): - frame = next(reader) - composed = template.compose(idx, frame, it) - writer.video_writer.write(composed) - writer.audio_writer.write(chunk) - pivot = idx + 1 - except StopIteration as e: - pass - - writer.finish(forced=False) diff --git a/stf/stf-api-tools/src/stf_tools/__init__.py b/stf/stf-api-tools/src/stf_tools/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/stf/stf-api-tools/src/stf_tools/build_template.py b/stf/stf-api-tools/src/stf_tools/build_template.py deleted file mode 100644 index 523066d4775c749a3b324d807e4fcd99aaf5d820..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/build_template.py +++ /dev/null @@ -1,200 +0,0 @@ -import pathlib -import subprocess -import tempfile - -import av -import numpy as np -from PIL import Image - - -def alpha_crop_detect(path): - result = subprocess.check_output( - [ - "bash", - "-c", - f"""ffmpeg -c:v libvpx -i {path} -filter_complex "[0:v]alphaextract, cropdetect=limit=0:round=16:reset=0" -f null - 2>&1 | grep -oP 'crop=\K\d+:\d+:\d+:\d+' """, - ] - ) - return result.decode().strip().split("\n")[-1] - - -def crop_resize_overlay( - path, background_path, range, out, left=0.5, top=0.15, height=0.85, crf=17 -): - with av.open(path, "r") as f: - fps = f.streams.video[0].base_rate - - with av.open(background_path, "r") as f: - background_width, background_height = ( - f.streams.video[0].width, - f.streams.video[0].height, - ) - - if isinstance(top, float): - top = int(background_height * top) - - if isinstance(height, float): - height = int(background_height * height) - - height -= height % 2 - - w, h, _, _ = map(int, range.split(":")) - width = int(height / h * w) - width -= width % 2 - - if isinstance(left, float): - left = int(background_width * left) - width // 2 - - subprocess.call( - [ - "bash", - "-c", - f"""ffmpeg -y -c:v libvpx -r {fps} -i {path} -r {fps} -i {background_path} 
-filter_complex "[0:v]crop={range},scale={width}:{height} [vidi]; [1:v][vidi] overlay={left}:{top}" -crf {crf} -pix_fmt yuva420p -c:v libvpx-vp9 -c:a copy {out}""", - ] - ) - - return background_width, background_height, int(fps), (left, top, height) - - -import json -import os -import shutil -import tempfile -from pathlib import Path - -import av -import pandas as pd -import stf_alternative -from stf_alternative.util import get_crop_mp4_dir, get_frame_dir, get_preprocess_dir - -from stf_tools.silent import create_silent_video -from stf_tools.writers import WebmWriter - - -def create_template( - template_video_path, - background_path, - out_path, - config_path, - reference_face, - work_root_path, - checkpoint_path, - left, - top, - height, - crf=17, -): - crop_range = alpha_crop_detect(template_video_path) - result_width, result_height, fps, (left, top, height) = crop_resize_overlay( - template_video_path, - background_path, - crop_range, - out_path, - left=left, - top=top, - height=height, - crf=crf, - ) - - stf_alternative.preprocess_template( - config_path=config_path, - template_video_path=template_video_path, - reference_face=reference_face, - work_root_path=work_root_path, - template_frame_ratio=1.0, - template_video_ratio=[1.0], - silent_video_path=None, - callback=None, - device="cuda:0", - verbose=True, - save_frames=False, - ) - - model = stf_alternative.create_model( - config_path=config_path, - checkpoint_path=checkpoint_path, - work_root_path=work_root_path, - device="cuda:0", - verbose=True, - wavlm_path="microsoft/wavlm-large", - ) - - preprocess_dir = Path(get_preprocess_dir(work_root_path, model.args.name)) - crop_mp4_dir = Path(get_crop_mp4_dir(preprocess_dir, template_video_path)) - dataset_dir = crop_mp4_dir / f"{Path(template_video_path).stem}_000" - template_frames_path = Path( - get_frame_dir(preprocess_dir, template_video_path, ratio=1.0) - ) - - with open(preprocess_dir / "metadata.json", "w") as f: - json.dump( - { - "fps": fps, - "width": result_width, - "height": result_height, - }, - f, - ) - - df = pd.read_pickle(dataset_dir / "df_fan.pickle") - - w, h, x, y = map(int, crop_range.split(":")) - scale = height / h - - id_set = set() - for it in df["cropped_box"]: - if id(it) in id_set: - continue - id_set.add(id(it)) - x1, y1, x2, y2 = it - x1 = (x1 - x) * scale + left - x2 = (x2 - x) * scale + left - y1 = (y1 - y) * scale + top - y2 = (y2 - y) * scale + top - it[:] = (x1, y1, x2, y2) - - df.to_pickle(dataset_dir / "df_fan.pickle") - - template_frames_path.mkdir(exist_ok=True, parents=True) - with av.open(out_path) as container: - for frame in container.decode(video=0): - Image.fromarray(frame.to_ndarray(format="rgb24"), mode="RGB").save( - f"{template_frames_path}/%05d.webp" % frame.index, - format="webp", - lossless=True, - ) - - with tempfile.TemporaryDirectory() as tempdir: - silent_video_path = f"{tempdir}/silent.webm" - template = stf_alternative.Template( - config_path=config_path, - model=model, - template_video_path=template_video_path, - wav_std=False, - ref_wav=None, - verbose=True, - ) - writer = WebmWriter( - silent_video_path, - width=result_width, - height=result_height, - fps=fps, - crf=crf, - audio_sample_rate=16000, - quiet=False, - ) - create_silent_video(template, writer) - - silent_frames_path = Path( - get_frame_dir(preprocess_dir, silent_video_path, ratio=1.0) - ) - silent_frames_path.mkdir(exist_ok=True, parents=True) - with av.open(silent_video_path) as container: - for frame in container.decode(video=0): - 
Image.fromarray(frame.to_ndarray(format="rgb24"), mode="RGB").save( - f"{silent_frames_path}/%05d.webp" % frame.index, - format="webp", - lossless=True, - ) - shutil.rmtree(template_frames_path, ignore_errors=False) - silent_frames_path.rename(template_frames_path) diff --git a/stf/stf-api-tools/src/stf_tools/silent.py b/stf/stf-api-tools/src/stf_tools/silent.py deleted file mode 100644 index db1255841b5be8ef9b251047b34861db75df5d19..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/silent.py +++ /dev/null @@ -1,28 +0,0 @@ -from concurrent.futures import ThreadPoolExecutor - -from pydub import AudioSegment - - -def create_silent_video(template, writer): - reader = iter(template._get_reader(num_skip_frames=0)) - audio_segment = AudioSegment.silent(10000) - pivot = 0 - - with ThreadPoolExecutor(4) as p: - try: - while True: - gen_infer = template.gen_infer_concurrent( - p, - audio_segment, - pivot, - ) - for idx, (it, chunk) in enumerate(gen_infer, pivot): - frame = next(reader) - composed = template.compose(idx, frame, it) - writer.video_writer.write(composed) - writer.audio_writer.write(chunk) - pivot = idx + 1 - except StopIteration as e: - pass - - writer.finish(forced=False) diff --git a/stf/stf-api-tools/src/stf_tools/writers/.ipynb_checkpoints/ffmpeg-checkpoint.py b/stf/stf-api-tools/src/stf_tools/writers/.ipynb_checkpoints/ffmpeg-checkpoint.py deleted file mode 100644 index 6c51235215cfc41582c8af1667a91c5471dd37e0..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/writers/.ipynb_checkpoints/ffmpeg-checkpoint.py +++ /dev/null @@ -1,100 +0,0 @@ -import os -import queue -import shutil -import subprocess -import tempfile -import threading -import time -import traceback -from abc import ABC, abstractmethod -from contextlib import contextmanager -from pathlib import Path -from queue import Queue - -import ffmpeg -import numpy as np -import pydub -from pydub import AudioSegment - -from stf_tools.writers._async import AudioAsyncWriter, VideoAsyncWriter -from stf_tools.writers._thread import AudioThreadWriter, VideoThreadWriter - -video_pipe_name = "video" -audio_pipe_name = "audio" - - -class BaseFFMPEGWriter(ABC): - def __init__( - self, - path, - width, - height, - fps, - crf=17, - audio_sample_rate=16000, - quiet=True, - ): - self.path = Path(path) - self.width = width - self.height = height - self.fps = fps - self.crf = crf - - self.path.parent.mkdir(exist_ok=True, parents=True) - - pipe_root = tempfile.mkdtemp() - self.pipe_dir = Path(pipe_root) - self.video_pipe_path = self.pipe_dir / video_pipe_name - self.audio_pipe_path = self.pipe_dir / audio_pipe_name - - os.mkfifo(self.video_pipe_path) - os.mkfifo(self.audio_pipe_path) - - self.audio_sample_rate = audio_sample_rate - - self.write_process = self._run_ffmpeg( - quiet=quiet, - ) - - @abstractmethod - def _run_ffmpeg(self, quiet): - """ffmpeg writer using named pipe""" - - -class ThreadFFMPEGWriter(BaseFFMPEGWriter): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - self.video_writer = VideoThreadWriter(self.video_pipe_path, self.fps) - self.audio_writer = AudioThreadWriter( - self.audio_pipe_path, self.audio_sample_rate - ) - - def finish(self, forced=False): - self.video_writer.finish(forced=forced) - self.audio_writer.finish(forced=forced) - - if forced: - self.write_process.kill() - else: - self.write_process.wait() - - shutil.rmtree(self.pipe_dir, ignore_errors=True) - - -class AsyncFFMPEGWriter(BaseFFMPEGWriter): - def __init__(self, *args, 
**kwargs): - super().__init__(*args, **kwargs) - - self.video_writer = VideoAsyncWriter(self.video_pipe_path, self.fps) - self.audio_writer = AudioAsyncWriter( - self.audio_pipe_path, self.audio_sample_rate - ) - - def finish(self, forced=False): - if forced: - self.write_process.kill() - else: - self.write_process.wait() - - shutil.rmtree(self.pipe_dir, ignore_errors=True) diff --git a/stf/stf-api-tools/src/stf_tools/writers/.ipynb_checkpoints/webm-checkpoint.py b/stf/stf-api-tools/src/stf_tools/writers/.ipynb_checkpoints/webm-checkpoint.py deleted file mode 100644 index b8b2356c5b7ebdcd72c580a8622c9ab986594f30..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/writers/.ipynb_checkpoints/webm-checkpoint.py +++ /dev/null @@ -1,60 +0,0 @@ -import subprocess - -from stf_tools.writers.ffmpeg import ThreadFFMPEGWriter - - -class WebmWriter(ThreadFFMPEGWriter): - def _run_ffmpeg(self, quiet): - return subprocess.Popen( - [ - "ffmpeg", - "-f", - "rawvideo", - "-pix_fmt", - "rgba", - "-r", - f"{self.fps}", - "-s", - f"{self.width}x{self.height}", - "-thread_queue_size", - "1024", - "-probesize", - f"{self.width*self.height}", - "-i", - self.video_pipe_path, - "-f", - "s16le", - "-ac", - "1", - "-acodec", - "pcm_s16le", - "-ar", - "16k", - "-thread_queue_size", - "4096", - "-probesize", - "32", - "-i", - self.audio_pipe_path, - "-map", - "0:v:0", - "-map", - "1:a:0", - "-pix_fmt", - "yuva420p", - "-crf", - f"{self.crf}", - "-r", - f"{self.fps}", - "-s", - f"{self.width//2*2}x{self.height//2*2}", - "-threads", - "16", - "-vcodec", - "libvpx-vp9", - str(self.path), - "-y", - ], - stdout=subprocess.DEVNULL if quiet else None, - stderr=subprocess.STDOUT if quiet else None, - ) diff --git a/stf/stf-api-tools/src/stf_tools/writers/__init__.py b/stf/stf-api-tools/src/stf_tools/writers/__init__.py deleted file mode 100644 index 8e8fdf16b03db24d6b1e512c562ba013163b41f7..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/writers/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from stf_tools.writers.ffmpeg import AsyncFFMPEGWriter, ThreadFFMPEGWriter -from stf_tools.writers.webm import WebmWriter diff --git a/stf/stf-api-tools/src/stf_tools/writers/_async.py b/stf/stf-api-tools/src/stf_tools/writers/_async.py deleted file mode 100644 index 5d02c643505a24ad67e37cb2f8f7d2ccbacae36c..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/writers/_async.py +++ /dev/null @@ -1,42 +0,0 @@ -import asyncio - -import aiofiles -import numpy as np -import pydub - - -class AsyncWriter: - def __init__(self, path): - self.queue = asyncio.Queue(maxsize=240) - self.path = path - - async def pipeline(self): - try: - async with aiofiles.open(self.path, "wb", 0) as f: - while (bytes := await self.queue.get()) is not None: - await f.write(bytes) - except: - pass - - async def write_bytes(self, bytes): - await self.queue.put(bytes) - - -class VideoAsyncWriter(AsyncWriter): - def __init__(self, path, fps): - super().__init__(path) - self.fps = fps - - async def write(self, video: np.array): - return await self.write_bytes(video.astype(np.uint8).tobytes()) - - -class AudioAsyncWriter(AsyncWriter): - def __init__(self, path, audio_sample_rate): - super().__init__(path) - self.audio_sample_rate = audio_sample_rate - - async def write(self, audio: pydub.AudioSegment): - return await self.write_bytes( - audio.set_frame_rate(self.audio_sample_rate).raw_data - ) diff --git a/stf/stf-api-tools/src/stf_tools/writers/_thread.py 
b/stf/stf-api-tools/src/stf_tools/writers/_thread.py deleted file mode 100644 index c877b1056fdf7709dc441a257fdb328d47c2e5d0..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/writers/_thread.py +++ /dev/null @@ -1,57 +0,0 @@ -import threading -import time -import traceback -from queue import Queue - -import numpy as np -import pydub - - -class ThreadWriter: - def __init__(self, path): - queue = Queue(maxsize=240) - self.finished = False - - def write_bytes(): - try: - with open(path, "wb", 0) as f: - while (bytes := queue.get()) is not None: - f.write(bytes) - except Exception as e: - traceback.print_exc() - self.finished = True - - self.thread = threading.Thread(target=write_bytes) - self.queue = queue - - self.thread.start() - - def write_bytes(self, bytes): - if self.finished: - return - self.queue.put(bytes) - - def finish(self, forced=False): - self.queue.put(None) - if forced: - self.finished = True - else: - self.thread.join() - - -class VideoThreadWriter(ThreadWriter): - def __init__(self, path, fps): - super().__init__(path) - self.fps = fps - - def write(self, video: np.array): - return self.write_bytes(video.astype(np.uint8).tobytes()) - - -class AudioThreadWriter(ThreadWriter): - def __init__(self, path, audio_sample_rate): - super().__init__(path) - self.audio_sample_rate = audio_sample_rate - - def write(self, audio: pydub.AudioSegment): - return self.write_bytes(audio.set_frame_rate(self.audio_sample_rate).raw_data) diff --git a/stf/stf-api-tools/src/stf_tools/writers/ffmpeg.py b/stf/stf-api-tools/src/stf_tools/writers/ffmpeg.py deleted file mode 100644 index 6c51235215cfc41582c8af1667a91c5471dd37e0..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/writers/ffmpeg.py +++ /dev/null @@ -1,100 +0,0 @@ -import os -import queue -import shutil -import subprocess -import tempfile -import threading -import time -import traceback -from abc import ABC, abstractmethod -from contextlib import contextmanager -from pathlib import Path -from queue import Queue - -import ffmpeg -import numpy as np -import pydub -from pydub import AudioSegment - -from stf_tools.writers._async import AudioAsyncWriter, VideoAsyncWriter -from stf_tools.writers._thread import AudioThreadWriter, VideoThreadWriter - -video_pipe_name = "video" -audio_pipe_name = "audio" - - -class BaseFFMPEGWriter(ABC): - def __init__( - self, - path, - width, - height, - fps, - crf=17, - audio_sample_rate=16000, - quiet=True, - ): - self.path = Path(path) - self.width = width - self.height = height - self.fps = fps - self.crf = crf - - self.path.parent.mkdir(exist_ok=True, parents=True) - - pipe_root = tempfile.mkdtemp() - self.pipe_dir = Path(pipe_root) - self.video_pipe_path = self.pipe_dir / video_pipe_name - self.audio_pipe_path = self.pipe_dir / audio_pipe_name - - os.mkfifo(self.video_pipe_path) - os.mkfifo(self.audio_pipe_path) - - self.audio_sample_rate = audio_sample_rate - - self.write_process = self._run_ffmpeg( - quiet=quiet, - ) - - @abstractmethod - def _run_ffmpeg(self, quiet): - """ffmpeg writer using named pipe""" - - -class ThreadFFMPEGWriter(BaseFFMPEGWriter): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - self.video_writer = VideoThreadWriter(self.video_pipe_path, self.fps) - self.audio_writer = AudioThreadWriter( - self.audio_pipe_path, self.audio_sample_rate - ) - - def finish(self, forced=False): - self.video_writer.finish(forced=forced) - self.audio_writer.finish(forced=forced) - - if forced: - 
self.write_process.kill() - else: - self.write_process.wait() - - shutil.rmtree(self.pipe_dir, ignore_errors=True) - - -class AsyncFFMPEGWriter(BaseFFMPEGWriter): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - self.video_writer = VideoAsyncWriter(self.video_pipe_path, self.fps) - self.audio_writer = AudioAsyncWriter( - self.audio_pipe_path, self.audio_sample_rate - ) - - def finish(self, forced=False): - if forced: - self.write_process.kill() - else: - self.write_process.wait() - - shutil.rmtree(self.pipe_dir, ignore_errors=True) diff --git a/stf/stf-api-tools/src/stf_tools/writers/webm.py b/stf/stf-api-tools/src/stf_tools/writers/webm.py deleted file mode 100644 index b8b2356c5b7ebdcd72c580a8622c9ab986594f30..0000000000000000000000000000000000000000 --- a/stf/stf-api-tools/src/stf_tools/writers/webm.py +++ /dev/null @@ -1,60 +0,0 @@ -import subprocess - -from stf_tools.writers.ffmpeg import ThreadFFMPEGWriter - - -class WebmWriter(ThreadFFMPEGWriter): - def _run_ffmpeg(self, quiet): - return subprocess.Popen( - [ - "ffmpeg", - "-f", - "rawvideo", - "-pix_fmt", - "rgba", - "-r", - f"{self.fps}", - "-s", - f"{self.width}x{self.height}", - "-thread_queue_size", - "1024", - "-probesize", - f"{self.width*self.height}", - "-i", - self.video_pipe_path, - "-f", - "s16le", - "-ac", - "1", - "-acodec", - "pcm_s16le", - "-ar", - "16k", - "-thread_queue_size", - "4096", - "-probesize", - "32", - "-i", - self.audio_pipe_path, - "-map", - "0:v:0", - "-map", - "1:a:0", - "-pix_fmt", - "yuva420p", - "-crf", - f"{self.crf}", - "-r", - f"{self.fps}", - "-s", - f"{self.width//2*2}x{self.height//2*2}", - "-threads", - "16", - "-vcodec", - "libvpx-vp9", - str(self.path), - "-y", - ], - stdout=subprocess.DEVNULL if quiet else None, - stderr=subprocess.STDOUT if quiet else None, - ) diff --git a/stf/temp.mp4 b/stf/temp.mp4 deleted file mode 100644 index 378752eb1f81a7bd29a2cc04bd03953a7c7c260a..0000000000000000000000000000000000000000 Binary files a/stf/temp.mp4 and /dev/null differ diff --git a/stf/test.py b/stf/test.py deleted file mode 100644 index 9a1e25cb1363a3ebe631ccc535e941961669aea7..0000000000000000000000000000000000000000 --- a/stf/test.py +++ /dev/null @@ -1,30 +0,0 @@ -from time import time -from datasets import load_dataset -from faster_whisper import WhisperModel -# from transformers import WhisperForConditionalGeneration, WhisperProcessor - -ds = load_dataset("hf-internal-testing/librispeech_asr_dummy", "clean", split="validation", cache_dir=".") - -# processor = WhisperProcessor.from_pretrained("openai/whisper-large-v3") -# model = WhisperForConditionalGeneration.from_pretrained("openai/whisper-large-v3").to("mps") -model = WhisperModel("large-v3", device="cuda", compute_type="float16", download_root=".") - -audio_sample = ds[0]["audio"] -waveform = audio_sample["array"] -sampling_rate = audio_sample["sampling_rate"] - -tic = time() -# input_features = processor( -# waveform, sampling_rate=sampling_rate, return_tensors="pt" -# ).input_features -segments, info = model.transcribe(waveform, beam_size=5) -# predicted_ids = model.generate(input_features.to("mps")) - -# transcription = processor.batch_decode(predicted_ids, skip_special_tokens=True) - -toc = time() - -# print(transcription[0]) -for segment in segments: - print("[%.2fs -> %.2fs] %s" % (segment.start, segment.end, segment.text)) -print(toc - tic) \ No newline at end of file diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00000.webp 
b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00000.webp deleted file mode 100644 index fbed6f08e4c18d8b4e606fb157366680dd13b51d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00000.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:60b59a4d906bf7513f4d868bc4982bfaa6235840717a89df791948f6666b314b -size 764090 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00016.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00016.webp deleted file mode 100644 index 5b077d5bf807f8f70efaf6d71e12e57ea39d55df..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00016.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a004eae0678afb9a231d6045388b3893dd30f77a5f98adc97a194853021dfc9e -size 1468454 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00041.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00041.webp deleted file mode 100644 index 923c119ca2dd8eb3bc18a91321d2b44489699f13..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00041.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a8c75345ab268f384da24750a4be6118c27368feda40104cb385f625b27f7db2 -size 1797652 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00057.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00057.webp deleted file mode 100644 index 2c398a089364234f9a631c62d3382a0a7b6b8a90..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00057.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:070e32ea7809827797e9a0af32050faba3b079cc1bd552503dd8924f20283546 -size 1830823 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00082.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00082.webp deleted file mode 100644 index 5961b1fe9004d7742b0be2c62fca9c184c6a928d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00082.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:04fc5356c70bb1fb00c95c5a1b39d78bd542d6a05bc67d5a33134e2a0dabbfca -size 1887865 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00094.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00094.webp deleted file mode 100644 index 67af5c9b46ce793002a778531e21076f4ac41978..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00094.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5240756c5b9de183158d1b95531578e7252730b681df624981280ddabac7dd6c -size 1870689 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00104.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00104.webp deleted file mode 100644 index 
318fdc96477a92554e1041f691f5969bf19e36ca..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00104.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:17170fc573c69fa8857ce0e8cbc5e0ea3518c3329f463e475c59d451cb1a27cc -size 1984633 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00112.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00112.webp deleted file mode 100644 index 0577ea8ba822ac018f9cb8af502b0591a11221db..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00112.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ded5595aa8cb136c01deefcf4b67685e824f972174bb54ca8ef2902b4931aa7b -size 2147793 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00128.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00128.webp deleted file mode 100644 index 806d818212b3f476ef2dccccf938655dd0bd90ed..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00128.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:db9d00ae62e5ac77196f5990d551248f1276ede8f2f748406e5dca8fce291d70 -size 2153309 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00145.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00145.webp deleted file mode 100644 index ffd6f7deab49cc9fb28e4e99dcc0a4dc9676c95c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00145.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:00620161b2909760c9812255ee0f226b8890a194db70cb80f876a8f8263a300f -size 1906796 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00149.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00149.webp deleted file mode 100644 index 2503fda1434f427dfdb25f3964a96bfa4090fea5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00149.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:02eeead86e86975cdf56d8d93c702e40f930f2f143b23979f11f62628a4f15c2 -size 1932133 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00153.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00153.webp deleted file mode 100644 index e61d2b197ccd2b6789a350054900f5f56e2b7d2b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00153.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e8d7995686e66e422c67dcc95df31b4b427cc588c6153fc09b033cc19e9cf22f -size 2358193 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00169.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00169.webp deleted file mode 100644 index 1edae355b1905a6ff27e9019f5138500c7a44048..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00169.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:948ea8fa1af29a575be586af3037cb58c2f7b6856e747ec8ed10f0d437511b5e -size 2219010 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00186.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00186.webp deleted file mode 100644 index 636b6c055d0b0b53019941c962f818d7b85bf5a6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00186.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b5f52ae5c099f3f07667ad5d485c3d04737fc3f084f588e80aeba53f461675c3 -size 2190709 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00190.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00190.webp deleted file mode 100644 index 566ea49da4d53c92240fb05d221a0851bde1f343..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00190.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1e4e911d8f0a10bd2f16de9dfa29efe502b2a098468c4571808aadfd5324e53b -size 2135415 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00201.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00201.webp deleted file mode 100644 index 4c71569122573c7d7b0c29db84b85beba5f08213..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00201.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:407d963bf93af3437f855a5a2806b557be4940b398f68472c5ef7e819056d84c -size 2065384 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00217.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00217.webp deleted file mode 100644 index 4433f3bb862615991fb27aeb82d434e9b8eae727..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00217.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:35a00e2dd8f483cdbdb518e930f2af6ccedea339364e1306fb0e52c2b284e1b8 -size 2104914 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00237.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00237.webp deleted file mode 100644 index f719bbaee51d70b74f3c6cebb9504a910c5eff6b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00237.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fff85a77821dc507260719edc03353ce08ca844fa5b2f6e97c13713b59cf3477 -size 2180995 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00240.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00240.webp deleted file mode 100644 index 1ebb26c7597c081f90b3c731507213b8e37f4f97..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00240.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:a112e637c0b56c969202bdcdb56090bfeed4885d27adf8aeab1f9695d80afbd5 -size 2257848 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00256.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00256.webp deleted file mode 100644 index 2375fd60e39f6deae87d1d214de47b540f8bbf08..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00256.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1730e81cc2dc2b89bdb90f9f15ccf2c66d02ee1a1fa4dfe5b4800c8ad522fd43 -size 2158277 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00260.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00260.webp deleted file mode 100644 index 18d76ef50daa4a3c6dc1716fa572be0a61327657..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00260.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:62aa532c203484fdb72764c1d82886745465a60df5676a761bb92df76b102ddb -size 2095110 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00283.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00283.webp deleted file mode 100644 index 88d007a1b18ffd0de194568cda58ada44ffd1899..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00283.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f2ff19801a04b7b0e83508c4e24a9ecec7392ede67eff397a87209312eb67eb6 -size 2258071 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00295.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00295.webp deleted file mode 100644 index bd6f3bab35656b72cd423adacee6eebce6c6c2f9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00295.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7d1345f5deea39ae26e0ba21925bfab4af9ae11fd052e6921530efb9845ff99d -size 2064154 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00305.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00305.webp deleted file mode 100644 index a5f147d4767675dcf230db33768e7bdd5aeb69a7..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00305.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dc39b9a682a7fed990ddf36b317736747cb1b495804eaddb4ac1dc33ea9204b1 -size 2189497 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00313.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00313.webp deleted file mode 100644 index 144a68464d0be3d4b7efca26ec861156cffc3e3d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00313.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0cdf8710edaa1719a163b5468dd0c23e0ad7f39a81876170f76f95cd69030e4f -size 2195415 diff --git 
a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00325.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00325.webp deleted file mode 100644 index 711b4d058064600661f7d2e821df680cf0aedf44..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00325.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a42f6cd838dcbf051811cafde5c3bcfddb28b3c3bfa5cf8ec31c33d3e1e3cb85 -size 2114626 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00329.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00329.webp deleted file mode 100644 index abc83862178501478e8fc82fc300c4b3b2c73f2e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00329.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:119a0c6052c319c1b99fcfa04b2362cb5c4a2b3f42b47c930d3c9ea7861e41c5 -size 2208940 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00344.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00344.webp deleted file mode 100644 index 02f63644f3d851801b93febb186db59d6b0303fc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00344.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c6c6ab713af78dd127261c5b88cadce469f9d88bae82b965d0dcd55b995b8741 -size 1999415 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00352.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00352.webp deleted file mode 100644 index 6b87a5fe1e823b6c660afb4126dfbeccfd8a375c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00352.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:092780fa54d7b0475806cd384ee7d109fdbe66f0ce490d28026f544ce4d9edf2 -size 2177688 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00368.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00368.webp deleted file mode 100644 index ee4b23f5eda660d486c5a7b80ca93fe61111fe48..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00368.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f23a2f5f52f1e03d2cd8c511a36a27de33a9e4d5ba8a3581e95463381cdfb5d6 -size 2197338 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00372.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00372.webp deleted file mode 100644 index 36b13cf6a3eb52dd928bb2f578d655a6ba6eb86c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00372.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:28584b8baa105c447d6371797164815bfafb6df60fb11126574649a12eb61460 -size 2135804 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00387.webp 
b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00387.webp deleted file mode 100644 index dc0fc71552ed81f90386246c376c6e71637b21f5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00387.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ca00ed089a15673e9b16f42f6a95c9ff1300c4579e2e69906b8a2a789351fcf3 -size 1959150 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00391.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00391.webp deleted file mode 100644 index 8c47761c40390f1dd23e8510ab423b166ed6bdb3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00391.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b048c94137456d07fc096363443bdc3cb756959842a43234924a1d621912033f -size 2022107 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00407.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00407.webp deleted file mode 100644 index 483453310ef7aeaa66aa3dd3e85ceae8d31b5611..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00407.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f22209f292b0937efdd7fa04278ca428c749f65715a1bd11cb30ac6ab3c442b2 -size 2115686 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00411.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00411.webp deleted file mode 100644 index 4071fea1f49fe9ee2b4a8a3b7e26b0dd7c75e2d4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00411.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:389807077297b72306d2986d02c09395bcd2a2e725f1732d911b800ac3d5a56c -size 2186430 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00446.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00446.webp deleted file mode 100644 index 8cb4beeacfaaaee6b6fd3c3c5a36ce4084330759..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00446.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b5aba56829706c15038ebe8e80e9e0e60928d61949772a254bfbf8e50335cda5 -size 2105221 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00450.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00450.webp deleted file mode 100644 index 980368bfbd178ecc69648910d3a70353e599bbf4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00450.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4ad5e10dcdf37ef1e3c15e8000058311319c27872c120a400c9a187f382c6d4d -size 2167769 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00485.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00485.webp deleted file mode 100644 index 
93727637e0fe55dfb8cf122a77db6c4cc0161084..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00485.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5ad8686baff184a5f8b5b437672e8b5cbf887934ae554603bee03f1dde725d30 -size 2037901 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00493.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00493.webp deleted file mode 100644 index 8ba86b1f3e68f841a9ed363df4a8165578c50539..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00493.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:07eb01a86ae80b34616a36dace0a8d63b286841c2d3df28d50053d70e10317fe -size 2019374 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00503.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00503.webp deleted file mode 100644 index c740eb0a085f9663ab742814c466046656510b0b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00503.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:14ae3c96513c3ca13153980d9fffc6aa2704149190bba8126e590706a6100127 -size 2093938 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00515.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00515.webp deleted file mode 100644 index bc3be35ed2ba7825db25b12b9664bf48ede8b29d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00515.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8d86306795ce4661c5d10b9d44c9970bcae70afbdc9acc9502098e6d8aa72773 -size 2194577 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00519.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00519.webp deleted file mode 100644 index 00491ba6227d71e1fc671d61af4ffdb36279ccf0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00519.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f1e6642a2cdc49168a8ff3d732ad4f6507599ff41a58c44aa0dc0082770aa064 -size 2146530 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00539.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00539.webp deleted file mode 100644 index 6d62184d1d05f3a2d7c083a4272c6f9cd7b50370..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00539.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:776edd74525128ca6ce08ab62d89a4562f2f95f5ee7032ea7921b1db381012d4 -size 2115635 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00542.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00542.webp deleted file mode 100644 index 2a643282057e3475ce7b7f0f83eb3f7802822811..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00542.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0547b6208e4f8db6a5903e9e06c15b13c2a360d75dee5e84f87567974b5f7041 -size 2186785 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00554.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00554.webp deleted file mode 100644 index d38b23eb188e8b5b2a601a66007cdd7dceff1559..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00554.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:62c72c0deb1af327b65a3c1314353044ad9a4a24636bde2da1e4344a4b466d3c -size 2117724 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00578.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00578.webp deleted file mode 100644 index 3995acb22ec425c19bc59cbb04a6281972e73269..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00578.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2f3842c05279780fde54c5d732ab4400cb05bcb8d050a49d07556b4876211f94 -size 1821090 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00581.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00581.webp deleted file mode 100644 index 5641b0c9a00f02ee6fe6874d5770c9c9cb61cd5e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00581.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d614bf47e1a76141526d8c7d71e114fb3fc002da6bf8b89c0d30d70f6f8a8d4b -size 1961950 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00597.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00597.webp deleted file mode 100644 index 2ae180f0c51389bdf74824a267ba79401415e01c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00597.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0a22cab23fbc868fb2ad77136b1dd656b39ec8bfee6c4bb30112a2bf2611429a -size 2120139 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00606.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00606.webp deleted file mode 100644 index c8ef3702dcefecd5171969c787fd5e5a96808a55..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00606.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a53a253f9532b7f2fc4cd9092a102d3377c169224a731056751526e66b787bb6 -size 2289744 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00610.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00610.webp deleted file mode 100644 index 02bd46d540bc926165f54515d704972f64b15de9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00610.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:459fcc5828e5239969ff7c40727bc5371718bb1798be6eba619ef0afdc3cb9b2 -size 2145236 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00630.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00630.webp deleted file mode 100644 index 929a2473bf9b3ad8489c9585de6e9d471499aed1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00630.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3df849e5dcf90ad964a8b7152aefcbafdedd55e3666902a6c11198539633b88c -size 2033859 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00647.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00647.webp deleted file mode 100644 index 846f03d616bb7275d4a4fe7db5d33fe02fcec9b5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00647.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1ba06735cd17b8685944ef2e2857ad46ca5c454776e1abcc11a56573fdfe2f86 -size 2218629 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00651.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00651.webp deleted file mode 100644 index a38100dab0ace1adf5cdb22c208f8d48c2534163..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00651.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2bbb8a7843d152d5243560b03f222852d1acb7484339028a23fdd54ac8381617 -size 2269132 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00667.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00667.webp deleted file mode 100644 index f65e5c4bc9e8c0cb3778508863a536faaf6bb8ac..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00667.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3897a54f169acf3a9f84f121bf1530b9544526d50112dbc27c7a048542fd7f44 -size 2257789 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00684.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00684.webp deleted file mode 100644 index 76c8b71e8ae864046c71eca5ac9de1c8e1842d99..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00684.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ebb0ff3e136a540f8d6008147c137eac133c90c39d3ba52fe6e7fbedab9d6df5 -size 2038258 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00688.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00688.webp deleted file mode 100644 index 770859dbb3d8f37c5ee3a31ed5ba686559fca5fb..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00688.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5679ef3ac196778ffa27f521c47e85e0f422b801b138fa33a3ed1b70b6f8d6fb -size 2162010 diff --git 
a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00692.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00692.webp deleted file mode 100644 index 2fefa7d446224884d5286dca029c6284c6533618..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00692.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:408e284424e21a847c4c15fe32c526e500bbe9f8e12273424afd8dc664f7343f -size 2226504 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00702.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00702.webp deleted file mode 100644 index f1c32ab78f9434a712ac6b9f6c61bf273a734997..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00702.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d73966b1e5b3fbe218a370ae39ca76a44de6cd40301e27daaba23c6a97e1fc9b -size 2279096 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00714.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00714.webp deleted file mode 100644 index ee09ee7094005cc19ae50907f5db416d117d9e23..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00714.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:07d7df0c16c02ed6f41f9c26967220a3342a5f378e1c90b26ae8a086140be298 -size 2161429 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00722.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00722.webp deleted file mode 100644 index 97565904e8ad1fed154c339555d1955e353aeb3a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00722.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:deb7804e76aa0b355ef2a0b265d82f0a085f7ad23230bd82a0dc638e3fca7ed8 -size 1794160 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00738.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00738.webp deleted file mode 100644 index 88c4807f631fa4551725099287108499c06aa4c5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00738.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7f0b8592ee62fa1721e31d2d6c752c38737986500de66012a39986b07d777a63 -size 2128492 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00743.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00743.webp deleted file mode 100644 index 88b46563770fe6503ada58bd6ee73371e2c29213..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00743.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5dfb2114842d11a2fc9e38375693cbd65fe1e3eed9f3395f2643160178b4016d -size 2218281 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00755.webp 
b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00755.webp deleted file mode 100644 index bb73adb710e7985b5a7838c08d5086e586c00b10..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00755.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0429ca952e2aaf100487939a1a56eed6f3ef70129538d33e88fae0373582ff39 -size 2252985 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00775.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00775.webp deleted file mode 100644 index 9b3325bc680e21a05c03210abcef1ecdce1d1e17..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00775.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a0111a6cb4ccf7e91535c78c3bafe32a8b01935038a611342e2a988064dd5fd5 -size 2073934 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00779.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00779.webp deleted file mode 100644 index 8d79e39b5329d37013edef52173c38707ca9ced0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00779.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:eaf002c733690c048a5290edc7f5220da1bb8ccbd0d209c210f91f27256e5778 -size 2035249 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00780.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00780.webp deleted file mode 100644 index 4881345db7b0dc4a0fe9e40ef87fea2747c5cd32..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00780.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fad40d32a4bcca3c751d24da07e8c69f49f5f470005f2b7f9d98ba105b6ceecb -size 2026605 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00796.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00796.webp deleted file mode 100644 index 7d7533889234eb858a34d715ab661f5a83a37656..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00796.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:365432a97f5bb409e582a886f42e456af95c700e61e4da432d53c45961cfaed6 -size 2204851 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00807.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00807.webp deleted file mode 100644 index 6760f379b9beb14bc1a71656955f8288a7230a79..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00807.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8968fac7a55ea016f3d0b1d59d04ba1a82a31f02016a89189eb3668c734c48ef -size 2125103 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00811.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00811.webp deleted file mode 100644 index 
0e27914e53062c65675242a1621e9c403c92ba7d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00811.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:25a1d8adea49a44111e29db07a9538a30b3f073d1c68bbe449e85cad7998d965 -size 2125537 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00846.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00846.webp deleted file mode 100644 index f8a23c6f594aae220dc19ac216d0075adde88862..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00846.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d8861f9ffd7744985d8508409dfd6e377d469e67c9a2686a8d4f23c10f002474 -size 2178891 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00850.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00850.webp deleted file mode 100644 index 987beead8b99bd0346f659bb7437ab5d0d3853d6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00850.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ee8700a93053fdf0d5525e9ae5395ed1531c78d36a56ddeff3d1654a73dd305e -size 2113643 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00885.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00885.webp deleted file mode 100644 index 2067951f4999c27bb041f648ea1aadbf85c7f78b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00885.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e0d3d3654590b2d5245b1ffea27b39871dbcc21172af7dd364f541f481bb3cf6 -size 2128004 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00893.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00893.webp deleted file mode 100644 index 681b84120521b2fd9daba229c6b3a317a36171f6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00893.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0411d19f6666e5380dfca944539f377960f69ea50ac8c6165dd048fc4f734f9d -size 2141186 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00903.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00903.webp deleted file mode 100644 index 23b311da68e1ed351d858413b22613a6330e558e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00903.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0de52c8b04223b31ad65aa9ddfdad1d1790b86e3fd170a0f6a8f049262f7cbb4 -size 2124904 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00915.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00915.webp deleted file mode 100644 index f7194b69a00adb79df26f0d50b56a4640de9a461..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00915.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c42f20053384d8293c0993f8a0f76e54aedd447b1f330240ae337c22de28de09 -size 1921314 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00919.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00919.webp deleted file mode 100644 index 94cb7d6d26fd06bead4ea4b0b72af93e729926b8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00919.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:130770720e39a038fecadd0c545ba081e5919ce46869aeb75ce75577dc124498 -size 1992701 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00939.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00939.webp deleted file mode 100644 index 1a84eb771ea822cceabc1284c99f1aff7e2e316a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00939.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cfe7e9901bb29854c58aa93d60b7a74ecfb47ff94f578f78d9557273495d40d8 -size 2267190 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00942.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00942.webp deleted file mode 100644 index efd7e550a6fef8bcc2433a20825667e656b65d45..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00942.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:80abcda8de7f01626aa3dbae548c8419ead26dfaee18401901beadb5a24233b5 -size 2148503 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00954.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00954.webp deleted file mode 100644 index fa982d6ef790d522a6036b0a9efa6f6151ea0ba8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00954.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fa5581435cfd61a6c2835d5ccf5a97ba79b3ca2e572377ee154d06bd40c442a4 -size 2197297 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00958.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00958.webp deleted file mode 100644 index 27394fc90ce08206c7b9ec787e760cdc2b56a737..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00958.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e20d789aef029b942a9125d2f8e5e38ab7e420784a15b1f8258c5d4f7ef24e59 -size 2118241 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00978.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00978.webp deleted file mode 100644 index 1fc9f8b2bf78c6aa9495a23bcd759dfe1623855c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00978.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:4cb9efbeebe427211ee0a932e03fb601e304ba7fb11bd2059f9d0e49c8bcc3f1 -size 2262873 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00981.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00981.webp deleted file mode 100644 index bc2638bfdb124d4706722dd0a299ead39ad34c4f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00981.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:87c8735d8c46c0fb7ccb6383f03a53c0e29cffeb6e70088a8359aba2dfa21f7c -size 2109775 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00997.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00997.webp deleted file mode 100644 index c386c3734f253cf2524ba05c1c1d8e0c915695d8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/00997.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:116843d728fda5d548014a039ad1adcac449644893eb8c42ec1d63fe96593278 -size 2134875 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01001.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01001.webp deleted file mode 100644 index 8d8ec5b17cbc19dd36ecec5ea415a66d96c906ec..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01001.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:21eabd334fa57d7cbbd6611dd04820984fa8eac5fbaab37b06023b0627952317 -size 2193520 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01017.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01017.webp deleted file mode 100644 index 838e69b0e239b434d769dd8dd555a0e76d94f568..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01017.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a624d2306fefbfa41c10200e09fde4c8cf2dda3d043dc42876882c6b4d22cbf8 -size 2374793 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01021.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01021.webp deleted file mode 100644 index 00577beb23a12a32f5b35382cea6c9a0c72e232e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01021.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:92034859ee9b28bec3bcf4cfa51c450364bf92d17cd86359095754821ff96e72 -size 2083947 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01040.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01040.webp deleted file mode 100644 index b8fc67c48f997dda6ec4cd583f8601d0479eb063..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01040.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:55f0d7957498ca24541b325d912ba534490b354bbf661dfc50d36f352ed05373 -size 2191009 diff --git 
a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01056.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01056.webp deleted file mode 100644 index 1b036d33a729ca18bdc482d6fd893f5c05e14144..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01056.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a9d89124cd586012c812b8436d7edcf66ddf2079a6e03746c9fd64211a9183ac -size 1720868 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01076.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01076.webp deleted file mode 100644 index cb07725ff92888d905e465d552c9aea2525d5a16..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01076.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a6210acc89cbb033de0427a282fb66b02f81c01c74c82b0db96ed38ce193266d -size 2286201 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01083.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01083.webp deleted file mode 100644 index 4ac152cc2ebef7ff8a49fab2b45638810cca994f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01083.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5aa4e50c4e133a2c07324ed0d1b22df66ff53ba7de32710fcb3bbde5e3c16dcf -size 2201030 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01095.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01095.webp deleted file mode 100644 index ffff9657941d1d961515c819caf546ab73f85fc0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01095.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:67ac088e653620ea830cc13ebf7ded697dc00e296c15a3495851ccb57a3b07a4 -size 2129796 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01099.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01099.webp deleted file mode 100644 index 4081c382ae5330d2521165b30573ec3d7af8a1ee..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01099.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:32070afc0940e4b36248419038138c4d4e2fff61f4b4d3af63eb95d712ae3d50 -size 2187430 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01105.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01105.webp deleted file mode 100644 index d5792f72e660f86fe6650687cee24b32f4244b85..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01105.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:331e90091d3165f9ee0790fd3a0b74d3c52e65c4058fb4a5bd83bd2bb0ee68de -size 1765445 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01113.webp 
b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01113.webp deleted file mode 100644 index 20c5bdd5151fb8e3ca2f36caf9ad5dd34ec268e4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01113.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:84112f93c41289cffd5963f653661810aebff5764c1fd4e3b43ddd00e81ac79a -size 2189551 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01129.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01129.webp deleted file mode 100644 index 3390e446a8bd67941da94eaa9ca989fff3008ed1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01129.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b4e2ba334a230886894be172abe160315498fa2e6139504afc14a3e21af6f7b6 -size 2215972 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01133.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01133.webp deleted file mode 100644 index be3f8c290f71ad26af9a2195ddbb1cab7992dff8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01133.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:447744f19f9bd6ecb744f0d4b92ace4db4f13c9addb1bbbb142bd1eef593ec57 -size 2198435 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01144.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01144.webp deleted file mode 100644 index 578ae997e5d2f359b8a38a598fbb12225198974a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01144.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ce697413b1d9d1d67a7024268d022e4b8d1e9260480a7b2f7802f6afa831f1be -size 2195761 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01152.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01152.webp deleted file mode 100644 index f7a29c8c3e013aafe0c9570b092441caeb9a1e64..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01152.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:979fbc1d38f8c490b9c7f57d194f0050bf85d9b621a196eb9ef636b8ab408b56 -size 1899272 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01164.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01164.webp deleted file mode 100644 index 450d8ac2e3c73c805f2c9521f87df45c0a986e17..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01164.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e14457020fc37ad50d9e6fd9938e14296be3bdb67f1af70ae4afb66dfac18dd0 -size 2349834 diff --git a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01168.webp b/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01168.webp deleted file mode 100644 index 
dbda7bbf7f354180b21dc62b205a21d249042a96..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/Cam2_2309071202_0012_Natural_Looped/frames/01168.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a6f17ca9ee7b9164ca13ece9fc4ef5272d0de61eb741dc8ff8f083d327c16827 -size 2411354 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00007_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00007_yes.jpg deleted file mode 100644 index 6ed59db7100fb2f84916e67e70baea4271571207..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00007_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0c70511b53b2afa8b5effe76385f878758ac5a8b3b980b1ec8257bdac890920e -size 53202 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00017_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00017_yes.jpg deleted file mode 100644 index cf423058d386c075588b066fafa6d4a849b7f5bc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00017_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:78bf424f14057f887dea8131cd65d8f99c55934af03f686c250d1fe7932f6ece -size 53277 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00021_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00021_yes.jpg deleted file mode 100644 index e4de1b168fde2c1b3a68847f03e889f5bd73e208..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00021_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:992c31940dfaa8359449b0cd7be2a57de36d46b03e30b528b708f127aba05232 -size 53610 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00028_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00028_yes.jpg deleted file mode 100644 index 6ccfb4efaf0b520fd86cb73f0026ef74a68d8043..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00028_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3b587363e7ef0d7da36c9090986474729e0e9958c533b9d18b58383ab4d8a53f -size 53933 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00031_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00031_yes.jpg deleted file mode 100644 index 9694154b9c23f6fbc91bbda18ed64f6fb80855fa..0000000000000000000000000000000000000000 
--- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00031_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c6edda8475568752c11de23262446a7e2201edee418c91525ec0ab7070b57e82 -size 53391 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00038_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00038_yes.jpg deleted file mode 100644 index 867120ddf94dfbd29ab9a1d3dc03ea4461ecd896..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00038_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3295b3cf33d05db03d3a8bd309ef79f7770746cc5d495f1cbe5fc4d46d773ea3 -size 54688 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00065_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00065_yes.jpg deleted file mode 100644 index 33db59438e84e5380cf66803442a0822be21a2aa..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00065_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ce2a4d20b7b0552ac01e3d301d6883e246d9640cc42dff71e11e7fce014c0c87 -size 56169 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00075_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00075_yes.jpg deleted file mode 100644 index 6ecd22e8321279db526b53e292bdd20887e95612..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00075_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0c6376bab5a0d4eb7c2ed3ca927a86d1d1a20f4a24dfeb7d19eaf9317f1e98b8 -size 55896 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00081_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00081_yes.jpg deleted file mode 100644 index 09d12575394ff155851e36cec76af49009a9ac2f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00081_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:df139e223c63c235d76dd42f0ee7c8fa0ad718c9bbb836f7cbfee3489da2229c -size 55524 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00088_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00088_yes.jpg deleted file mode 100644 index d67b4eb0b905c575ab8b3581cd18db9af73fd09a..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00088_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:eab3d7b3a731db33a46c3245721576e7f9a21675d68d93cbe92d3ea6c0aa0d60 -size 55326 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00091_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00091_yes.jpg deleted file mode 100644 index 3ca072a16c4bca44f89fb9b04433b71b8dbd4a0b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00091_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:28ccc3bc20bb8016396a706f57e11650c681d4dca55ce809d191879951521656 -size 55352 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00098_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00098_yes.jpg deleted file mode 100644 index e788f06504765d6831b7efeaafd48f2ea2ed7114..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00098_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:af74a790d4e063d7212ba228f4ae5adfd3355bc565182e00a7129fe7bd6887cf -size 57281 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00122_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00122_yes.jpg deleted file mode 100644 index 6422dc531e702ff28c11e3a89dafb7f54a5cae0e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00122_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:365540c9653faa1305f0a83b9632c5eaf11cb99027f9a44982a9ac55a057a961 -size 57632 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00132_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00132_yes.jpg deleted file mode 100644 index 600ac4353802c34614e9ec24531387a2903410bd..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00132_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:826045a60b0851ee886e80cec2116f943816464985f1055312ca86549750315b -size 57468 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00140_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00140_yes.jpg deleted file mode 100644 index e53ed0b7f441a28eebdb26523d94d1c4ddaa0b86..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00140_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:53722e901f7bafab9b4f2af739ab11efd0e1219c4c4c4a4c6c3018c3afa51ec1 -size 57729 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00149_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00149_yes.jpg deleted file mode 100644 index b409c861f716a02d385655a7d98a3e67ccfa86cb..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00149_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:62f90c36a99edde1e4e086f9c3140f47791e399de832352fc2fbfc1488657722 -size 57332 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00150_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00150_yes.jpg deleted file mode 100644 index caa79ae3532fa65909f1ca49c6ce437fc75c0d73..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00150_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:69a77edc7c2c2f04df364933157845f83b0554bb6e83b8754d48440c04e96072 -size 57054 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00159_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00159_yes.jpg deleted file mode 100644 index cf684026e0ea4deedd29dc5f0d385bcfc95ec7dd..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00159_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4f6e73e23fdf548a5ed7c8166a08d4edfa2b062eb8be05af45c6a39a0041ebe4 -size 57765 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00182_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00182_yes.jpg deleted file mode 100644 index e9b627d116c0055db5d7cbe7c798b50c0e4d4773..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00182_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:46c3428ad2d1c49f1dcd71f6c0679adc385a6316548e3d267991678b72c4ed23 -size 56989 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00192_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00192_yes.jpg deleted file mode 100644 index 0ddc7acf3f4f99ec29a4dfc7b89b460f0f016707..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00192_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5ae549d62541759b2f8a7699220d6b293adeb62a8bc75e977051a2ecc98ba896 -size 55878 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00209_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00209_yes.jpg deleted file mode 100644 index 86c3b8a5e31a6e5cd8227148f795bd8687412fb1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00209_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f7f35368ab86db1e24f94664564f0ee4fbd480b9dc79403e8ed96e5a3d5fff19 -size 56765 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00219_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00219_yes.jpg deleted file mode 100644 index 8627b33682b84fa21bf0fe85794580b4f03da20f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00219_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:15b94260d9116b2ff2a741726afd055598e4f918b83b42a590a810bf02517688 -size 56931 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00226_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00226_yes.jpg deleted file mode 100644 index 1e456a1f0c66dc775e8cd50845c9017b7fe48516..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00226_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cfdde6514f03b167ce984561d8fa82b4430e339a99e003408171840b5ea53b57 -size 56768 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00236_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00236_yes.jpg deleted file mode 100644 index d86eeb0f69f119100b233dd80be01c5131e29576..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00236_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0dbb06c3314b8d903c545f25e506d75de96514f2564ac8731fac3c2042b1b1ca -size 57290 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00244_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00244_yes.jpg deleted file mode 100644 index 4d95c395633c08d58455107c9219bd6adbd5595e..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00244_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8e25465f5a472d1ba632e67311f093422067e0c0090afcf764a738170553ca8f -size 55218 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00254_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00254_yes.jpg deleted file mode 100644 index ae0dfcca4058bae49c8ac77859eb79a5b0f57897..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00254_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:60e7ee57f2cf574bea22b0e59863bb9802899abdbd7d3dc42b78f9b3a1872678 -size 55716 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00286_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00286_yes.jpg deleted file mode 100644 index f9b986d0ecc703001b1ce57dd2de3341040ccaac..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00286_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3287b41e5c2dace2c58faa843d00cabf1627314cc9df35cbbc81e75052ddb977 -size 56773 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00296_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00296_yes.jpg deleted file mode 100644 index 567aea2565a4d160e6ba8c82e95da9590b536b12..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00296_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5a6b6e2336cc405dbd3b8d336f57790ec2009e1c73aad831f4efaaadc451fc24 -size 56583 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00303_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00303_yes.jpg deleted file mode 100644 index 0dab0d6d98dc9f3f2788cffa860a946f60ba5495..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00303_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0dc962fff61bcc220cecbd9297494d9e4d7e9deac1d09a93561515efacf58c38 -size 56227 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00313_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00313_yes.jpg deleted file mode 100644 index c653768ce01155237eb004f3f3cb77455c0caeff..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00313_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:73873b61e3eb8428c77c0e8b72a79aa44edc2e92e00bfdb6f63041028e3d856a -size 57469 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00347_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00347_yes.jpg deleted file mode 100644 index 234a19542c0dec396ddd24a0d7edf1b3177028f5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00347_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:751bfa3d6b4231ab803c967180c7aae8dc037d8cdb2e0c00a851d1bd80c514d9 -size 57354 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00357_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00357_yes.jpg deleted file mode 100644 index c8cf0b6e8246fae84708833c1e68f857e82249ec..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00357_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bb97704e9dfcc6d5dccbc615f7c30e3beb7158d4bb9457077f35c3879dac47d0 -size 57701 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00361_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00361_yes.jpg deleted file mode 100644 index f25b58b42ea7daa93c97035539da96002157549d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00361_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a4363cdbb7ff2d232c77ab7c8a7e151388bd9aace8e522e9797624de9c7bfdc8 -size 57464 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00371_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00371_yes.jpg deleted file mode 100644 index 7288dde51016fd003fd788a91cad6a7412be54ba..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00371_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3d4dcf57307d7b828552378cdb08d91a63ad257221ec7860c604a30a5cc29894 -size 57875 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00385_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00385_yes.jpg deleted file mode 100644 index 85105015de9f5703d83368509e079da26220809c..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00385_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ca21af905aa5a62f03da26025f81784d493543b1d9a88f3d2c2c3ad04db2016a -size 55549 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00395_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00395_yes.jpg deleted file mode 100644 index 5d78af4e2be462841a5c9765e0edb37d878e4e3d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00395_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9b0bf99cceb682a6f0a521dafc867fa9b1dfeadb1427ed2642b2cc84220354c9 -size 56652 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00401_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00401_yes.jpg deleted file mode 100644 index 4ca81b030034208991de33243b7e9594adeb2df7..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00401_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e34e03ef8b04790664f8d2483f558a45bbf1c7c84a7ea67e483c97433e877c90 -size 57592 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00411_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00411_yes.jpg deleted file mode 100644 index 58a9f58314c067d774a9960ed416b47b8dab3883..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00411_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c31c65e9dd415b66f41dcdc18a91cebf91309404270fef7e0005fbfd966a6ed1 -size 57050 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00463_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00463_yes.jpg deleted file mode 100644 index 5dda1af8299b63a914b2f66e3fa0c7b109eefcd4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00463_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a29db450a517e169529d5a107f1e9bcab211267dfd6f642837156fd266e96ef3 -size 56784 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00473_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00473_yes.jpg deleted file mode 100644 index 13648dec1f1888d155965a9dae3bd41bb0f2abe0..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00473_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2eb4570235d9990cc3fe97abe23b43328b042be546296b08c0f08c22508aa681 -size 57451 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00487_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00487_yes.jpg deleted file mode 100644 index cb0ff8d16a85230249854e03087204fefdd58493..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00487_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e08511ce5da8f17710db0b787ca6f7ece26a079f83ebc3999cf023c3a1534e74 -size 56533 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00497_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00497_yes.jpg deleted file mode 100644 index ac5842a5ee8d33456dc94c793827d65bcc7bba22..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00497_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cc23d324339e7df79e4a59e822cf538528ae35bef6a1f7a05db5c82e977a2894 -size 57049 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00502_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00502_yes.jpg deleted file mode 100644 index 5444e369ed242b3f2a0662c8ad0d4c0e40e5e75a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00502_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:497e8511df20388cbe0ffb2230671252dc2d86f2f0bf813deb615686500d885e -size 58245 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00512_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00512_yes.jpg deleted file mode 100644 index 39ddd308991ec8559394d09aefa7ca185a3979ea..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00512_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b6f6a8d6c0a34f17bad322031f5cf3fb6765e157657bdf07746a2c8946a5cc34 -size 57424 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00524_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00524_yes.jpg deleted file mode 100644 index 51cf743356b252cfde3b7b397d608e2c7bb805e7..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00524_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:809312cf678f85423bea8917131aecb2f8ac398963b70594288988ecd29425b8 -size 57410 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00534_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00534_yes.jpg deleted file mode 100644 index dc85a50aabe69560685b9da534115e164837b0e8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00534_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2428da721840b354003dc30789b4b7741ad8f633691b7b2ad10dcf81464fe5fb -size 56442 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00546_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00546_yes.jpg deleted file mode 100644 index 750a44a76ce52f4229acf8dec4c854c93e55d2e1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00546_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7f40e34f7d4d69ef724208257887a2fbd4ca9f96ca214b2db9c434034ce98e1d -size 56757 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00556_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00556_yes.jpg deleted file mode 100644 index 454006ecb9d67d3e55ce1d9d4ed6f6f22d65f537..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00556_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e43402ff02785e3b0fa695bcd2281d70a5c813235b703e1583d9de41dc032714 -size 56896 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00569_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00569_yes.jpg deleted file mode 100644 index 39c814bc4f96a85cf73bec0c9f763f7e3c988628..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00569_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f5afa30621c354044773661af7eb06a5423bc3c7743fac8dcf426dc5bf8bfe20 -size 58317 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00579_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00579_yes.jpg deleted file mode 100644 index a5bfe3dc714ad8f0a7bea94f1f28a9d750894471..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00579_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3c14ba865509b0be23e6006ccbc95e43cd9edc85bdd44c32b47ba2ed99c5d43a -size 56546 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00620_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00620_yes.jpg deleted file mode 100644 index 5cf348da972a8f84e1c3b43bfb1ae57a379464c9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00620_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5b6aec2a25f5c79df5ff01b1588a7026d284010bc1d6a77b4d00d902a0ef6111 -size 58037 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00630_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00630_yes.jpg deleted file mode 100644 index eafab792ead4dd7d9e0a3cee0d352b00fb8d4bec..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00630_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:32b589ab64b753f67df6a001f726f343c5fcc19b0dcfc6e60ccbe9d38d5f22c8 -size 57409 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00642_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00642_yes.jpg deleted file mode 100644 index 9011ffd47a44090645d915fc56b1c6ff15233014..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00642_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:28d1ddbc32c23cb4436553689d4efbd6c72cf949516418774d42397208725501 -size 56543 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00652_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00652_yes.jpg deleted file mode 100644 index 8b4ec2332f3bd16c3c331131b9a7ab6f83a468b2..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00652_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fcd4a5a193d9ede076e03a92064403669ad3a547d8c8ff643b1987eddce6d40c -size 56581 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00664_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00664_yes.jpg deleted file mode 100644 index 60ca719c8c98becfd109c0fe6c03b44e647a6db6..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00664_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4a78310ed60d5169733ce4c33a5f3943bf932326e33dbaf2395ca848cc867cf5 -size 56641 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00674_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00674_yes.jpg deleted file mode 100644 index c79ad8f7f3127ec4f061a1a20aac9c58f9fbc426..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00674_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0b3d07c0d4752cf35d9e26f5d0df71e495575c2ca0a78e8eafdfa747ddb65c0a -size 56349 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00689_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00689_yes.jpg deleted file mode 100644 index 2e9122ee8a074656018940f9bd4356c929a9f843..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00689_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0c52f826f001a2fee124155da025dd94550e67eea172236bb210296f702d058c -size 56287 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00699_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00699_yes.jpg deleted file mode 100644 index bcc2e09d6db272b24764c1cf48f81dae4439f4fc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00699_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fd1f417e4411afe82af7b20d90d345cdba48e578f1693e7cd79f974a4da649ec -size 57193 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00705_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00705_yes.jpg deleted file mode 100644 index 9a00edd9ff8efad8514b629f79e018897dc95457..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00705_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a0bb9fcebeb655b54080c3e91b5ed1653a521165b9d3105fc503997b61ad4172 -size 57524 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00715_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00715_yes.jpg deleted file mode 100644 index e24c0029c352c679ea49f4240158ab36222df745..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00715_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:17e7d30547354de21ea9fb516a1f0f623d1b3aa5ddd1650e847cf2a24ccea498 -size 57398 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00748_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00748_yes.jpg deleted file mode 100644 index 466dae52528c1d0b7c114c27eca344c2e9ccfdcf..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00748_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:114870d5a793a93e1800ded104102786d3c11951949f78c949d4494023b6cfe5 -size 56307 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00758_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00758_yes.jpg deleted file mode 100644 index 00693526eb6820931b9b5685764b7c53fdddb916..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00758_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b944d8f6c64516cde04ee0d214c40d700e4c42f1ecdc61339de634adb04421eb -size 56056 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00767_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00767_yes.jpg deleted file mode 100644 index 26aee9d72b34793ce6ed18467b28384cf423932f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00767_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:817d2034a1263735a7278e60a6ca58501b19b95ccaa7eeb4e754e1b154a65e76 -size 56944 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00777_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00777_yes.jpg deleted file mode 100644 index 74d633c805bbb2d14c562fe716829147b45ae9d2..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00777_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1203248016f537ccf77848419f02fd398a55a945c73c75255870df3a0f1ca354 -size 56001 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00783_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00783_yes.jpg deleted file mode 100644 index 33166281cb5fd515dc559db92be8596ee12e09c4..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00783_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:af56f1619961bc9b07dca813569cf09cc945cd69aec59a0e60c86f383443cd81 -size 55882 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00793_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00793_yes.jpg deleted file mode 100644 index 0bba1d19b57fd2068ba12e95daaa5e1c72019309..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00793_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:72b581f1bd19848305ee9e9fce24aa54a0a470bbac958a35b14db48b62820030 -size 56390 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00802_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00802_yes.jpg deleted file mode 100644 index c1c9326be79a595f513c860d7ba2579c20815290..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00802_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:51a743059780424d2dde90cf298bcb9b6be3819710a298224cfa87f694a1bfaf -size 57028 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00812_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00812_yes.jpg deleted file mode 100644 index f07e9ebc2e879c57fcd5843c5553115ceaf917c0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00812_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:57e6b46bf80f7f7d3748d526a203a24c532cdad0696beeda76883a9996ef6e32 -size 57705 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00860_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00860_yes.jpg deleted file mode 100644 index 2333c6e2fbd82b6273267d5ddb879f16962d527b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00860_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d8b9211371b9fcfa28d68853be69a9a69e5cc05879f284f70970a4e0e979bff0 -size 56787 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00869_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00869_yes.jpg deleted file mode 100644 index 20784fb4d3a7ca0e2c7ddb01a3174706e9160d91..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00869_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:caffe20612690931b72d038b14de3fb4660efde09766f775df0cbe8a7d404d89 -size 56874 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00870_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00870_yes.jpg deleted file mode 100644 index 9524bac97eee5bc5c1ce491999dd6397d7454d30..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00870_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5fc1e8897714e4e8916afb3088dc041fdb4999b0a7a14316624cff269abd26f0 -size 58120 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00879_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00879_yes.jpg deleted file mode 100644 index ba098df77f02fa5d703d07e4796e9471690f5ea5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00879_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ca4335079e37024c31a4c74e8b5bc96decf22e096c085ed31aad05a0f740745d -size 57272 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00884_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00884_yes.jpg deleted file mode 100644 index 3915af5036cf044a9e271dc82c66005796ebaede..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00884_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6da88ae40ee2e8e488ec5e4f0cf98e329dc3e49d67d646d323e316cf5b115b59 -size 58322 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00894_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00894_yes.jpg deleted file mode 100644 index 5191342d3df2abebf434c6098c34f34c5b0fe8c0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00894_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ead4f679708087616304a72d3066f207ece16f6f652db5cae398844d9f88ae8d -size 57413 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00901_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00901_yes.jpg deleted file mode 100644 index d264dfd95ef16743c16781f3a3f226a4f6c27e4a..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00901_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:64999957721c48e53b95666f30ecffe32923ad1fbc16a40b78aa80d01a1f3e89 -size 57192 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00908_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00908_yes.jpg deleted file mode 100644 index 0565d18c3dc45b42eb2f134f7237f0e12f06ede9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00908_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b344bd8c369f3758543716ea638caf9223fd413ddcb20120102bba190c65124a -size 57801 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00911_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00911_yes.jpg deleted file mode 100644 index cf3549c23a0f20ae3afa931d365f7d6764cdcada..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00911_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:68b0a0f3afc4c4523b753cbe5073ad58ee4b1235aaefbbef59cbe036af0780b0 -size 56733 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00918_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00918_yes.jpg deleted file mode 100644 index 53ebca4465c8158e5692997f03713c537986bbcf..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00918_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dd6177a97cf0e97f2ca4d9eb90f77d581e7fc11b137a1bcfb8b2e04e10d2e333 -size 56399 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00927_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00927_yes.jpg deleted file mode 100644 index 8b7c0471fdb9cccefa98344609cf76b351da491c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00927_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:624d7734733bdec73724c8ecdb3adb421028dba6cc5e5cc78dcb722a15b1c841 -size 57286 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00937_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00937_yes.jpg deleted file mode 100644 index 688a1d8761be205569bad8db74e853ae18ec1941..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00937_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:81b2ecb6329d7a61670fe800c5a4fd8b53a586abfff4aba47f420e45f910791a -size 58182 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00945_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00945_yes.jpg deleted file mode 100644 index 9842ea651a8cd6fb5fd7278a086fd731d21cfbab..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00945_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f3d2513c1217ca3b8ebdd9f811b98b3df052d92050dd55bd53b77787fdb99764 -size 57348 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00955_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00955_yes.jpg deleted file mode 100644 index e30a0bba62240d8e5e8b3d82b19e32bb110609e2..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00955_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:afb0a2dd639487628f99d8e5c5b2269458443bcc95d49b55cc27799973a279a5 -size 57632 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00963_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00963_yes.jpg deleted file mode 100644 index df1a6d8eb191a6607f174bebaf7d1866af45f4c9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00963_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0f8b626a0ebe62c64dcc6a2de505d3ef372cfcc35fb88cf9ea6fc42cd384106e -size 56347 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00973_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00973_yes.jpg deleted file mode 100644 index 12a83b269b6343b7c2fab464956acfeea3a5dffb..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/00973_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a5e215f4e1a8e945436830ba98d92d5b260ff0d81c83e47f35dc9f4c96678872 -size 57002 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01001_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01001_yes.jpg deleted file mode 100644 index a1230c202a0e60f9d7497670c8bec4eef331543a..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01001_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b4e101ed0cdaf2115e25d0ffa9d0d5d1b26f950c2a1e792f255f3bd0997a7076 -size 56680 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01008_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01008_yes.jpg deleted file mode 100644 index 0313930ec9908b6266c12c0d2201599c3e8ee98a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01008_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e6fdc5585f0b9f739bdbc73fa6b5ff0af1b2ef0841ca627bac6d703b75d2a8ff -size 58690 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01011_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01011_yes.jpg deleted file mode 100644 index 3c980df9db1b74213fe7f93baaf2bd1ad984fb79..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01011_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fb11ddaa05e4feacb272ecb93e6e72117c5d7f0ba229a9a610715b64d8f76434 -size 56880 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01018_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01018_yes.jpg deleted file mode 100644 index e58befb42ace7720de646fa0fdb1f59ebe8a3f52..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01018_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5b2f72b7298f5dc4961201fbd667a1d4d0395c4d87ca953642310a7626a8fdf1 -size 56316 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01063_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01063_yes.jpg deleted file mode 100644 index 42297439362318e06ee2284fb0a24a0df4b8a2cc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01063_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:06d6a745779ca773865c764ffd396e8e1f374e238980e3d69a193e3aec8a8a18 -size 56467 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01073_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01073_yes.jpg deleted file mode 100644 index 90b522693c6c8f1d8eb45a226d45bf05eeae2525..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01073_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9dd6413cd3d5cf85e7f9dcc3a39d80bcf253ec196061436238690d5eb245e7b2 -size 57533 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01087_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01087_yes.jpg deleted file mode 100644 index 50ce969196313a7bcddf9fccb3479ca14b129ec4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01087_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:785b69c0cd90953ba72b1929128f38355c6824b245fa7e9a6029700c4820265b -size 57370 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01097_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01097_yes.jpg deleted file mode 100644 index 8365b4b7d844e504e3791c9ee1af72f021076f5c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01097_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9c9525d3f62e85dd445613447d9fe759da4be9c26083ec470e83f34b364157cd -size 56933 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01124_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01124_yes.jpg deleted file mode 100644 index d4e10b7f9f0d7ddc54a5ae4b3165ec06b44d9e5f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01124_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:90ad0fb86020f8b2bf18d5a502bca1e715211862b7bf045e265836343358a57e -size 56311 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01134_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01134_yes.jpg deleted file mode 100644 index 2e9c3253c32d8f3e31c6ad1bf8a48da3c158418e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01134_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2f1ac3279aa5170c2ed6633656de395b3d8221934a1d37127f8c6a19811d4ca6 -size 57002 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01146_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01146_yes.jpg deleted file mode 100644 index e15941bb88bc8f66cbaa1c10b3de6712d984eaa7..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01146_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d4f1ed30f4b0a0175454971021e4c0246c9a1d877b5fdef92c1a9dff4d9adb45 -size 56900 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01156_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01156_yes.jpg deleted file mode 100644 index 46f00dcef6a06b333671418e3a8214322aa2cf72..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01156_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d37023b54fed50ec10ffb7c585e3f56aee3e0fefb05e15ba07705e97c92584dd -size 56155 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01160_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01160_yes.jpg deleted file mode 100644 index 9b4fe03c4300392faa0e3b7750286656f32c377d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01160_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6483d8cf745310cb10596d009a06cc3f42495070219631fb231482a125e20e44 -size 57243 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01169_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01169_yes.jpg deleted file mode 100644 index b667736394f3e5842c2fa6ca7fcacdd8171069e7..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01169_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a305a0ceb1e85271f845dd7ae873e1cdf49ea43572ce2623ce34fdd3455c9009 -size 55779 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01170_yes.jpg b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01170_yes.jpg deleted file mode 100644 index 5214d3e80a2ec8039d74703be71301e65c1dafa0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/01170_yes.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4d3d1db672abd3444870c3c01314b2307522cb768318a526ae00b45805955b4b -size 56552 diff --git a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/audio_debug.wav b/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/audio_debug.wav deleted file mode 100644 index f7d5506309986c10bbba69a066f8498941aacee4..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/Ian_v3_front/crop_video_Cam2_2309071202_0012_Natural_Looped/Cam2_2309071202_0012_Natural_Looped_000/audio_debug.wav +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:82ea18945f429c24a366913122c22410e69ac82bba04d3f16143d39b44cc58c9 -size 8478378 diff --git a/stf/works/preprocess/Ian_v3_front/df_anchor_i/Cam2_2309071202_0012_Natural_Looped_000.pickle b/stf/works/preprocess/Ian_v3_front/df_anchor_i/Cam2_2309071202_0012_Natural_Looped_000.pickle deleted file mode 100644 index b862dbcdb5697679fdbcf565b61f2ca94a4edeb9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/df_anchor_i/Cam2_2309071202_0012_Natural_Looped_000.pickle +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:410850fc627f0c2cd6699f1825ccaee3e4a5ee26d54708e3bcceeeb511fd553e -size 31559 diff --git a/stf/works/preprocess/Ian_v3_front/df_face_info/Cam2_2309071202_0012_Natural_Looped.pickle b/stf/works/preprocess/Ian_v3_front/df_face_info/Cam2_2309071202_0012_Natural_Looped.pickle deleted file mode 100644 index 7aa0d44bddf6c3f91243c7d6d9dbc7ccbe24959b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/df_face_info/Cam2_2309071202_0012_Natural_Looped.pickle +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c73c0b3704e01331bcd7ee7a0e5fab0d55823d9158f8b81fa46fd3dad3241ea1 -size 117457 diff --git a/stf/works/preprocess/Ian_v3_front/metadata.json b/stf/works/preprocess/Ian_v3_front/metadata.json deleted file mode 100644 index b7475a28022188e89df0ee6fa159f1f266826b64..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/Ian_v3_front/metadata.json +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:615681285dec4805f5dd7886b7ee1c9a348ee0bb8efc38e91841bccf835bdccd -size 42 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/.ipynb_checkpoints/metadata-checkpoint.json b/stf/works/preprocess/nasilhong_f_v1_front/.ipynb_checkpoints/metadata-checkpoint.json deleted file mode 100644 index 87592de56e2223871951953f1f45072da2f0c7b0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/.ipynb_checkpoints/metadata-checkpoint.json +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:06c2997fe0bba05ca3922fd09d0ce435fb175d94de722d4027a6f82bdfeb6946 -size 42 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/crop.webm b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/crop.webm deleted file mode 100644 index 1fb3a869b3866a3dacb3d71fd0943819c563ea32..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/crop.webm +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4a8c564f9976ca80a307fe69fc52e98ddda636e13b4f9fbb57a22536c84681af -size 8992978 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/df_fan.pickle b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/df_fan.pickle deleted file mode 100644 index 963ef8f35c6194d4014b70adfb0d938547247932..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/df_fan.pickle +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:71b81aec58f70612291e08d593e5d936c4a80e92ee47b6206ab9a3ce90960e22 -size 3326555 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/df_fan.txt b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/df_fan.txt deleted file mode 100644 index 33c3a9283e9471800894e32beacc12c25fd6af75..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_golf_wear_nodded_cut/front_golf_wear_nodded_cut_000/df_fan.txt +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:aee408847d35e44e99430f0979c3357b85fe8dbb4535a494301198adbee85f27 -size 7 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00007_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00007_yes.png deleted file mode 100644 index e00fd18c5e42be024a4b85a02a7d88a45c93818e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00007_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a0142c145d1e19ec12915c5207151dc571a0bde4ffb20c3ad842af9d65813d03 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00017_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00017_yes.png deleted file mode 100644 index dac9a40dcf851be6221c25ccc968baab1780867d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00017_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:acb3bbff36722128962bd91db2fa9acf8a9c176220b9a74312e90aa42a4ad999 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00028_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00028_yes.png deleted file mode 100644 index 1bfe965df1b9454e26b10e2b177c66b9d78bdb26..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00028_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:937fe6c4d852b5ae2de919ec625535a7f3cc90d721ba7258479f21f0fc8983e8 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00038_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00038_yes.png deleted file mode 100644 index cdf1c6015d97e326c5a61cbac7aff7dcec170ce8..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00038_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:64697b8e32db36fd7bac42cc62d7e4c32d715412464e764986600f92fd98c882 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00065_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00065_yes.png deleted file mode 100644 index 9d34b7873dca194fd44c47ee36a0707003e6e913..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00065_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f3f1cdecc9d10fbb76f9263d99a6f00563de4ca8e4e1fae98d0934c4472042e9 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00075_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00075_yes.png deleted file mode 100644 index 5d9445ba1ec10fc69d8992e6ef728a9f1c4b5ea7..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00075_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f114d0e40d528592f7ecd30749941a28e0395027efa40f86cc630ca7aade43dd -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00081_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00081_yes.png deleted file mode 100644 index 26ba6018b7b2afe4ce61f784b0cb8eeb8fb068d0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00081_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:52d2a31afd91a67942dd577ba68308224b12aff677e07b0e4e223c640fe472b8 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00088_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00088_yes.png deleted file mode 100644 index 94693210b74af78d04bf61952ce811cc582740bf..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00088_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c6c825e9669124b90795ae23f081ffb45853990348a150e23103a0347b23a1d4 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00091_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00091_yes.png deleted file mode 100644 index b1d9d669b29a01ccd1d68d9b102cb13847aa7072..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00091_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1e97a62a195284bca88bc320cd3d4262a40a75fbb86a8f79ee8590f22d3fb62f -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00098_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00098_yes.png deleted file mode 100644 index f635d0a9e0bc94c087ec7c8ea817ad1d7a25ad96..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00098_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:230a08185c84f3f879db3f3b0fabe03b67fe6a862c2f5d23580b4581ac0c9496 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00122_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00122_yes.png deleted file mode 100644 index e12ac92f8525b0b524b26ace93096f0674327a62..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00122_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6c0e1eda23a525f15374d3d5492b855fd2fdd916facc2950aaa38a29406072e8 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00132_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00132_yes.png deleted file mode 100644 index 33270fc5c98452e5405c0a03a60325f8e6e1f324..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00132_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:12ffb1f1ae227e040e007cadc66e0975521746d93ecc9e9e1d83e17d34fa3afe -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00140_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00140_yes.png deleted file mode 100644 index e1bb013022fd26b1959898f2d2bb2b51962081e8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00140_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7265fda9534ccb082c7a97efa987b2503aa661f24caaae895e1d86cd5934ff31 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00150_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00150_yes.png deleted file mode 100644 index 8fd1c05effe0a49d0ed5b8f3d1f67db3d19d881a..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00150_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ccacb9277d6ec4d6cf2dfc6e9f3de9b2eb82b43d7ebef6556d2d43eb061cc51f -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00209_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00209_yes.png deleted file mode 100644 index 8d4fc38d84197d35b1c27cbf92f87cb886891c8a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00209_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:81eeab5efb9b4c3a2ebf2af3b8a7dc70e8bec317df41dca0ffb52f4a424ed90d -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00219_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00219_yes.png deleted file mode 100644 index aa06edf8e3e90fcdd7b82c063596dbdccbd6d80f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00219_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0896dcb53b34f571ebbe9fa36127e48a093afcc01969a92b84bd109832d42e93 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00226_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00226_yes.png deleted file mode 100644 index ca819ff8897262fe39482e425578e988d9e49cd7..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00226_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:36701b4bdeac328f131e466d44e5d7cb70ab332f2aad6e13d93a13a6529b3dcd -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00236_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00236_yes.png deleted file mode 100644 index 3417d93f08a3dab82f64b81aa98a938ed6a9dcf0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00236_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6d66f2165931b8e1903a28d9a5827c95672ba9c9a58e61f085c7e52fe8d39542 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00244_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00244_yes.png deleted file mode 100644 index 3613d327c8c343ac1e2c62a44aa154912a07753c..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00244_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:12da03619414cf61921f3ce93cae5bfa689e19bd2d04d08138615f4f2d422f69 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00254_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00254_yes.png deleted file mode 100644 index b4411bafebb73e6a1a032ca93f34c9cadbac95c6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00254_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e72732bf07d9e42fa9d674f7886a2f2c01b1705e1bbd16a00494b6b1658c1d7d -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00286_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00286_yes.png deleted file mode 100644 index 5d804f691b4a2b26bae8dc08e40aab156eb4136b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00286_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c64e66fc969daaf9685c4ffb64238dc41a8ac955d599eda4aeff69b14980b91f -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00296_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00296_yes.png deleted file mode 100644 index 6531ac3b13f881acbb46527effa00a26d3592360..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00296_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1a4008373a0d8d799dff82732bd4701496b4aedeef2bb32a097be2eb8336a3a1 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00303_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00303_yes.png deleted file mode 100644 index 9dd5a16fefd47e77b5fec70bacee72b8933e8de1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00303_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:523f8fa6471a81e4f49f1842e3d5c041e9842af3f4240fdc7c6ef3684b36d4c8 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00313_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00313_yes.png deleted file mode 100644 index 72d5582a73f234c96379d51938c9cc466b15582f..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00313_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:80af3697922d88590e90dd7b2bf0926ce7a305a9764155f2ef3dafebe0069986 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00361_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00361_yes.png deleted file mode 100644 index 2607c42abead65666aa4ffc852fd27f6f936f93c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00361_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:770f8cb2d16b71bbf196095b4b6879d40016c4591aece0ddab97d0125d8b6291 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00371_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00371_yes.png deleted file mode 100644 index 26927043825f2bd31ef493db85b37782c9b72129..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00371_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dfd73845d65b228f7b2aa631ca676051243bc4c7fcb849e11a18daea72a6e12b -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00385_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00385_yes.png deleted file mode 100644 index 56ac7d1500df7395e39f1844c818ac803e5d0293..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00385_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2c368c42ba35fa001b805275cccc956d99173e2b33e30659bcd35f09e133b7f3 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00395_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00395_yes.png deleted file mode 100644 index 321c07c6323abee9986630a7ff70b2d73492f867..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00395_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:29a66f8a7ed77b3426509da9a7965e71148e1e20c387661f3dfc6d5638519c6d -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00401_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00401_yes.png deleted file mode 100644 index 91293c2a80f508ffaced91173e3feca2347d394e..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00401_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dad34613afb25d4a1ed217503c72da10f2983a64663e3971cfa980cd975b0aa1 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00411_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00411_yes.png deleted file mode 100644 index fc1a54a76bad200a8e6722b934f83f30feadbc10..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00411_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a3e64330127253c17310078037bfe473c7702b6d18a10747f70d9fd18c05365a -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00463_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00463_yes.png deleted file mode 100644 index c7d564c57af8e02fa9754d82a80e17d3bb7d1e9f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00463_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2636611bf13a59bb4e12b45086e8309b03da6346f243c04c157830e58194a09a -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00473_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00473_yes.png deleted file mode 100644 index bb31a8afe65d0643491f08b2d6469528f83d56c8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00473_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7458fa9ae853f91f4663ac0d592bc8b1460d74c8c716f2ab031425e93570c587 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00487_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00487_yes.png deleted file mode 100644 index 4bf641656d64e1cf8bf9b99cfbba76cc4da26846..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00487_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a139d0c982f160968472ee8b4c55615ace194af75c33c7f467aec7c457868a2a -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00497_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00497_yes.png deleted file mode 100644 index a429a7bda74b9f1b699f85c01d44f4943dcb9fc4..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00497_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:282705cf722573f2495e0f50108a25145370b9a774f38b2c9ae8e3738521ac92 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00524_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00524_yes.png deleted file mode 100644 index 29544beefe067c6cf12eff8c4a498c0ab9e09e7f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00524_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:eb4961745cde6624c0ae58d86bb0a4f29e6bd465bb549e4af85bf2225bb51a0c -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00534_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00534_yes.png deleted file mode 100644 index b3aa2bf4d76678bb57bb0c16164f6bc6de6e8bc3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00534_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:583c1cd1f112750efbb8a3cc151ab45b84d52e73aa4c5fe2fa36117fd847c7d9 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00546_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00546_yes.png deleted file mode 100644 index 6224a61db9b34ebd3d576dc00e20951dff808335..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00546_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f8f32db37f6ad8fddb84ea6f6eb5941aebbf9ce48f61d8a3d5719f51bb47bb6e -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00556_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00556_yes.png deleted file mode 100644 index b7cfcfd3ffc0113a14152cf36cce09bd4dd3a073..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00556_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4e745a0a8e92d94ac71cdb6fb697e56bb55d12e7c351d5e147348efa243f7253 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00569_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00569_yes.png deleted file mode 100644 index bc7827fca9ca49d012166362d9b151af30bb3537..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00569_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:52fdba61866f52abf18d35865e302fb7b61e8b1fdc09d04d628c3035a248e6d2 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00579_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00579_yes.png deleted file mode 100644 index 96edcc7b926111b6981252fa5c5e5cf8b115df49..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00579_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4970330e5a02aed708c7f0746a4d2cb833c45ead79a629ead326fd8664a7ae37 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00620_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00620_yes.png deleted file mode 100644 index 73a7ebdb09fc182119da806dfbb9f7a96e2de3c8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00620_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8647fac42f4481933d246adb8e052333071da736e803fa99a87b7bfda241e83e -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00630_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00630_yes.png deleted file mode 100644 index 237e6d70474b2409f85b4a9a2ed50463004ef5d6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00630_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:89dd4848424071b97590d6bbd87a5e0a57c3a9ab186db211c2c069f251ac3df2 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00642_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00642_yes.png deleted file mode 100644 index 28c7715867946de3429d77a00c2de00497314ec8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00642_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1ac6800f3259a8c76a73993d84cb5a0a535606943feb563bfce95b097dc18c93 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00652_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00652_yes.png deleted file mode 100644 index 898ead855c25066602649fced6f55162b8b1860b..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00652_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2786a673811df8d60e44371902f7db5fbe11c7af0b0079e3016438a7f11b39ce -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00689_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00689_yes.png deleted file mode 100644 index cbe6f262461d728ef5a61b647f6164cdc9b3dfe1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00689_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e7cbeb3cbbdc5662695c64a53cf41dc70ee3fdf17ab8d19120f82bf116ccbcba -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00699_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00699_yes.png deleted file mode 100644 index 63bfc03015d13b520f301075a2353c7f37234851..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00699_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:abba55841cfd03bc9a3f3d887b2953b22a0c89a4489e8d53f9734cca991d183d -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00705_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00705_yes.png deleted file mode 100644 index d27e9a6073401bcdd6fdd1336cee4562cb214012..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00705_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8d60a49ce6673cfe92db0b13a532099383bb5ba92e659336ae27cf530cb654c7 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00715_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00715_yes.png deleted file mode 100644 index 834565491ce8ea3d04a9d6fcdb04fce382a5a685..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00715_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ff5a5676ec52d51a99d8dcb7ee9532ebb5f2048c40db59867a45f8401136560b -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00748_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00748_yes.png deleted file mode 100644 index e944163e82e23bea027aa737dd126fe47777dcbb..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00748_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:eba26c088f87f8586167cbfff26162de256da6a184a37324a6ff015a6542582b -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00758_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00758_yes.png deleted file mode 100644 index c970df7927c6f38a5353f1f826df26701773beaa..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00758_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:47f9ac0d42facc28212ad5078d90ce09762a564e9a7a7e608f7c399682de4408 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00767_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00767_yes.png deleted file mode 100644 index d18ce74ff46e34520965234c822aeb825f768b51..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00767_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8e94a74debd07f9e8aa7ffae750ccc51dd7e1c82b45696646b6a874b27e44ed5 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00777_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00777_yes.png deleted file mode 100644 index d87975f7ab4aec967dd0f9ae60058dd34fc21185..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00777_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:91666f44a42156ff6e622ed5b30d51264c7475093ea77d64c53dab230ad94dfa -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00783_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00783_yes.png deleted file mode 100644 index 9fec80f218553524b5e2eceba266860f8892633c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00783_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:79f42a2e3b69dae4a54c5cfcba2295a747b4bfa85536b1e36d2909b567051111 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00793_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00793_yes.png deleted file mode 100644 index db06c7400f23d2dcace1c3363e68731d09737b31..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00793_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a763db91e460cdd5f321d67813542cd8d62613704c42efe9dab3924550d9792f -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00802_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00802_yes.png deleted file mode 100644 index 1f718f66b93632aa97e0b3acdbd8945f750f3efc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00802_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:97ba9640826d8ef354aba852786d2bf1baac5a9e12cdbb3a8b108f9513885caf -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00812_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00812_yes.png deleted file mode 100644 index 0fda76921e2b998bbd7ac2057be876be55ee9a26..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00812_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5ca4bea290decafdaf9259607bac122601e0bd0cef4c67e2ac7d3d944462af1e -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00860_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00860_yes.png deleted file mode 100644 index 7e079f4d3c6d20e7fde05049a79d15ab17801648..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00860_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:931db092aae6468d6028317874fae70e3605066b6d345958a3e1baa56574d30a -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00869_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00869_yes.png deleted file mode 100644 index 4026832facad937caaad92cc034b6f1fae8649c8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00869_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4a2244f331dd0de9483285d77d6fc52278f45c29f9ec552713e180539fff0c66 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00870_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00870_yes.png deleted file mode 100644 index c845ec13f12930a9185c11568afcd406aacbd8be..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00870_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:784ff8fb265ba09b2cf2814295ec8d95b89819d09ecfac5b5b51f598f1030589 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00879_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00879_yes.png deleted file mode 100644 index 5442c04b945243f2728c3e698c819533867a61ff..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00879_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0c2896e2e4617744d757f042e0aaaee76fd9cdb8ee146aaf439c63dca904ee51 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00884_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00884_yes.png deleted file mode 100644 index af83c49d738aa70e47c6e31bdc1255b901a63748..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00884_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:aa2bb193c322db9c471996b16fd5b7a7f3c33b54ae39e295398a83c341b0e135 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00894_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00894_yes.png deleted file mode 100644 index 7af9fcead236052493fb5086b357fd8ba3956632..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00894_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:653f3f7cce85887dee60852098115abd6b557bcff281c06b87226071a6b7dc6b -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00901_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00901_yes.png deleted file mode 100644 index 6948bad4360c44c4c05832a8e55152707bd8ef50..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00901_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:65bb4612530922d5709a0c6fbcad4ae2d1704f5e0d0514d57b161ef595ed02f4 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00908_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00908_yes.png deleted file mode 100644 index 02059c9258322076c25e988c4a2849095b7ca24f..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00908_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:27bb693eeac9b599dfc9a791f363085c67940baa8d0abb3d7d933d701f98bff4 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00911_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00911_yes.png deleted file mode 100644 index 806fa46f9659706bd293c7d78ded9c2874809cc5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00911_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:01ad9ca1453f41ade6b8b31f4761b26289048d7834ae392bb7d1c6713b3098fd -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00918_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00918_yes.png deleted file mode 100644 index 2b98a446b2bf4148cda7f3c583a8c17d42de1cac..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00918_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2f244720f32325a2922877a474bfbe63846c077a7c834fee7cb57ff327507e36 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00927_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00927_yes.png deleted file mode 100644 index 1e62da14421a6449ecfd450661b13f775cf2b810..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00927_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1a7df0d8e6153c9bb1ea75c62470822440c576f0761b7cd10c05d346ed658211 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00937_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00937_yes.png deleted file mode 100644 index dbccc920b54f07774b98431e927518f776bcd15a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00937_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b2af1cb16bf89130e7b4671db99a53c5cd94de5047f498ff2501f782ab031108 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00945_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00945_yes.png deleted file mode 100644 index ff6ccc4fb0fa5a660dfac337a917fef3c9903452..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00945_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fdc08afccc92c51404a6a398d930623d8574acb6151646debf543afab4d474f9 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00955_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00955_yes.png deleted file mode 100644 index 7b1a74afdb02e10cb94f145ddf6d7c96a83982a9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/00955_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8afbca2055ffdccaf333591d6b358865a6de598c0becb5b718833c6cb88fef7a -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01001_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01001_yes.png deleted file mode 100644 index e177619ef6dfbf30c1e9e79c518f5b296c6235c3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01001_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b4d8467659d51cf25060f40b030de0c554bd9640b5f0b8cead3a3ccb0651417d -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01011_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01011_yes.png deleted file mode 100644 index 9900ebf830574b81f1e3bc08c9db0e086be279f9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01011_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9f1c9bd42f0de8e0194dc9fcf3e5d1f2dea97e5c1952ada2d80a961d3ab96253 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01063_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01063_yes.png deleted file mode 100644 index eebf761ff136925410f5daeb60c715d3e15da08e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01063_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0f8a0107eb390a3fd7f7065c91c67593fd1f959d0a1df7c605aaa23b01bb8444 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01073_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01073_yes.png deleted file mode 100644 index 1a72de212610ad3783f4c0a9f2f3fbebe15a48d1..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01073_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0bc82498e31012e90543eedea2be746e9d4fc062e33c2c3ababe40b0e3cd4135 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01087_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01087_yes.png deleted file mode 100644 index 1ee5ff126a9115cc48927d4062cc80062aa58cd3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01087_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:176ab631d246f48fcdc6ce21dbcd9ff3adf9555cefa6620468996e6adf6f8f15 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01097_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01097_yes.png deleted file mode 100644 index 72a594dbcbf778c8a24f1f4797318b09a28d2a40..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01097_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e7c69eec95b6883a8d763520174f08ecb626dd8c496045eedc0e5818a795af23 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01124_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01124_yes.png deleted file mode 100644 index 5cd16efb0c75a3dd3ac39c236397c9e5eea86b72..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01124_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1a38dbdea3c15e608678cd3faad3137a246076a98217d1d631dc7cf169f97174 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01134_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01134_yes.png deleted file mode 100644 index 5bdcccb7ee626a39880c5b318115877792b5a38a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01134_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:287fc8452bb7b62c8eccb26a4fad73ae02eaf5d929fcf7ef1b526a01f2f5060c -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01146_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01146_yes.png deleted file mode 100644 index 40acd72012aaf0f5b320f7d0f90808f6f4c6eb56..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01146_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b95df708c661ad45fa116141d942d7481ba77139e9bb8046e9bfe662854403e9 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01156_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01156_yes.png deleted file mode 100644 index a9df3499efaa68c502a76f81886a91e7ef9d9277..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01156_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:352248f075fbdbb9daa76800df6945c10b888abbfac7c3992041aed8202e6c17 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01169_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01169_yes.png deleted file mode 100644 index 7125b90ec59d41decb71eaa2a0d431db6836f2b6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01169_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:06db6e3541388647713176d063f68869e87cad50d28b2a794853cb7e5b957a52 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01179_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01179_yes.png deleted file mode 100644 index cb85b8e7d762789cc9de602eaa1a626bafa72c5c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01179_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1eb1d43a9a3bd2c52308c7ab1c41cd033293e4a8c194d6fa66f3dc6e08618ce7 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01220_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01220_yes.png deleted file mode 100644 index 56fe44df43a1dcd4da137948fb070f882e5918da..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01220_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cfb345a6f4ca0bcbad18f2bafb0911485ef9ab4ba177744e8fd038ce3c791588 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01230_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01230_yes.png deleted file mode 100644 index d309786fd0b10354056af69ed6ebdf5c526c0326..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01230_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9d66c6b3d778c8946dd044c40c6f1d06d576ffa6b1ec238339aa43aed8f0767a -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01242_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01242_yes.png deleted file mode 100644 index b43a2eef4425bc0f3172385ae1fada17dab14370..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01242_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:553f412adec4c2a7efce776e0f0cefd1a17ffbc5a063b705327d7a295c1f2461 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01252_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01252_yes.png deleted file mode 100644 index 1535b484aab75b885a95c3f48bcf5ede1dc73249..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01252_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ac0db2883e0e8000474eefce968b95c13153d7bc8e48513fb93be827dc412929 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01289_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01289_yes.png deleted file mode 100644 index 022116c662ab5b38f65fa3cf4ac57247d6f6cfad..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01289_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e13fe0767754b45c451027fbd80293d8ded68670dfe874a3773e1e11012c200e -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01299_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01299_yes.png deleted file mode 100644 index 90e3c0c58c141d3cbab87182c366e7b087f61431..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01299_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:35b1c59807c283cb4ebd4c19fe2de5c4bb4fe41af8b17e7fc38bc3136875d440 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01305_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01305_yes.png deleted file mode 100644 index dc217321f15da92750688a8a162d19754f7f320d..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01305_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e9655dd0e2fa8dec695c9c32acde0f0b3385f6c37a9b3ced3185e9b14174fa79 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01315_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01315_yes.png deleted file mode 100644 index 73414c51a6d45ef0bb91c987598bc4001ae7abeb..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01315_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e37495e5680976b8c1b59db376575f4de0129e1e17e02e4038e405d1b29d6f3a -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01348_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01348_yes.png deleted file mode 100644 index 0a2859ee38ddd899aeeed98ba2568d037d218924..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01348_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9c22c7ab2de9b893cdb8d713e7d918d95f39b71fde21cbff86c3db580737bcc7 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01358_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01358_yes.png deleted file mode 100644 index 97441af96d4263e5693bb14317adde478e878eb4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01358_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:407725f98fa7dfb5ddc8d8dca12b115d389bd5f2beba5b8868fe707d6bb563f6 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01367_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01367_yes.png deleted file mode 100644 index 28a6edf96c5c7e25c7e7197b09ba5473c0c7737a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01367_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fecbfaa60fd9c6e185084243e7179991326d290af5067b2b884481a726f280df -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01377_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01377_yes.png deleted file mode 100644 index 0abcbe1d294c96f2f2567e0d1ed0267a5bdbe573..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01377_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e8e00acbc605bc315ee3d46ff19898a8f642b5c4d252ad4b8a6374784da3fbbe -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01383_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01383_yes.png deleted file mode 100644 index 97afee56c4ac5e2520553149920644949d73da1d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01383_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3ee077b13a7115d5e831c3f88d466a2887b9ad9bdf471eac4e8903f222d2c67a -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01393_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01393_yes.png deleted file mode 100644 index e8f5d9db069dfa0142679b1fdca4e4c507843303..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01393_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0341df9d9c5a798e1ccf78525a9baec48ba095d748ba2f4331379b86fef920b5 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01407_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01407_yes.png deleted file mode 100644 index b739797a3a6964cf8d5b0a5377bff6256e75751d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01407_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:02dcd05043ef0491d0c3ec0154b5f5bdb2b82f6729f58358d89456a6181a3385 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01417_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01417_yes.png deleted file mode 100644 index a09492dcd66cdee67e74eedbbc67ed4fce3fddc3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01417_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:394f76355bc77f1304180671761d4f8fcfab18b8ffbd6e316d89ed7567e350c5 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01428_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01428_yes.png deleted file mode 100644 index b5b8ab2d9b21a50dff778d880f44ea66bc9c6964..0000000000000000000000000000000000000000 --- 
a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01428_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8a6ce2cd9684e01996aeade0a5f82c889fa0ca7be18e65753b9c027a133f4943 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01438_yes.png b/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01438_yes.png deleted file mode 100644 index 79e82b60d6ec897401900d2feb50a2ed1da851dc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/crop_video_front_one_piece_dress_nodded_cut/front_one_piece_dress_nodded_cut_000/01438_yes.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1c0468bc84b272897c2795162dd8b5cb0b2eaf6e178567c6b32dd06a50617271 -size 496826 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/df_anchor_i/front_golf_wear_nodded_cut_000.pickle b/stf/works/preprocess/nasilhong_f_v1_front/df_anchor_i/front_golf_wear_nodded_cut_000.pickle deleted file mode 100644 index 14f14a7fed3151b9553d5ac4fe8f9a3a8c820fd3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/df_anchor_i/front_golf_wear_nodded_cut_000.pickle +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9371a6b8c525a2eb9f90ca286145c97143e76f546afe24aa99e61ca33b34dcee -size 38482 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/df_anchor_i/front_one_piece_dress_nodded_cut_000.pickle b/stf/works/preprocess/nasilhong_f_v1_front/df_anchor_i/front_one_piece_dress_nodded_cut_000.pickle deleted file mode 100644 index 381daa66caf59b0da279a2f3bf62ed78ef5d84bc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/df_anchor_i/front_one_piece_dress_nodded_cut_000.pickle +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dd4704c0a9c32b22b14deac1e966127d5a2727d957cfb1547f88b68ea18abdae -size 38566 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/df_face_info/front_golf_wear_nodded_cut.pickle b/stf/works/preprocess/nasilhong_f_v1_front/df_face_info/front_golf_wear_nodded_cut.pickle deleted file mode 100644 index 1f7b08bec9497d448bd1a70b36dac6c15f03e844..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/df_face_info/front_golf_wear_nodded_cut.pickle +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:44408b86e82431f50b069d3a3d408e62706f106f0d7e79bcd7118723cda145fc -size 143627 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/df_face_info/front_one_piece_dress_nodded_cut.pickle b/stf/works/preprocess/nasilhong_f_v1_front/df_face_info/front_one_piece_dress_nodded_cut.pickle deleted file mode 100644 index 2d20930b614d2a225acb676794c5dc074a0021d5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/df_face_info/front_one_piece_dress_nodded_cut.pickle +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:05d953518ca36c9243168c56058b37a46aeb0cad940dec75cf0626ae75ec5f8f -size 143591 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut.webm b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut.webm deleted file mode 100644 index 
84797a5be15f2251316d74b2dac9526cec66f4ba..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut.webm +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bc3f6f0e5ab11b1db1c12fc397391e66b4152a0272b31528998b52fffc416fa9 -size 12855848 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00046.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00046.jpg deleted file mode 100644 index 16533599cce3e5245c93565870589c088f2fdad1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00046.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:997546cfa5351ca22680aa4ccc5ab7d08932d2ba7b240956a61a40a290fd1b95 -size 29076 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00047.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00047.jpg deleted file mode 100644 index ab1a65cd2b339f98aedde6b9779323a1da9ce0df..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00047.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a5e2feb08a1c182ba4bb8ad7167cad78e1bbc3d2610f1dfd5d21aa3328d2c572 -size 28978 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00052.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00052.jpg deleted file mode 100644 index 8ff4543318bc5d21f3d4e2745368a14bcf253adc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00052.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:10f73013ca41817568d8b99e461a246671f0293aca39244e24b1ddf60a77dcda -size 29337 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00053.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00053.jpg deleted file mode 100644 index 670a06e42684e8574219412d19be8a1c1df3d6fc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00053.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f8029bfc317c39c1f72c7046a48c252bf70eef3dd189b7b33c4fa2fe320cb39e -size 29284 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00084.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00084.jpg deleted file mode 100644 index beaeb3ac82f7f6cfee2d2b92a7d39f21172348d3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00084.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:16a47ae7b8d6d7209cd628444f42384e567477799faad5791acedfb16a262b6d -size 29156 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00085.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00085.jpg deleted file mode 100644 index 5bdb8c671d4775ee3f9aed9cdc3bf83656fb86f1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00085.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:16d62cd03cbe08a3dd96f38695083ac0e15c7d0d948363eb7463818a2253ce09 -size 29161 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00090.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00090.jpg deleted file mode 100644 index 5a399bafdac475c08ec045f0e10deabab9462ceb..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00090.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4e3eaee6c1060de3c06995ffdd700d92c50f29f48adb8500c6557d75ce9cb61b -size 29130 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00091.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00091.jpg deleted file mode 100644 index f759802251ac948aaa2eaa62fa5db965524b338e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00091.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:322af7c087c1a2af407eadaea52200d1be75a5e3e561b3fd948346a21f2655a5 -size 29465 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00119.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00119.jpg deleted file mode 100644 index f72dcbf6d8cac93592ecc696ac29f90ab6897bba..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00119.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:33cf89c11ffa423b0c5f478658ac3a6ee7cd1fc3ae81aedb1f832bea28b6f952 -size 29097 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00125.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00125.jpg deleted file mode 100644 index 78ef024f2a5b402a095ee92e3c485a5a2f8b3418..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00125.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3e99521153c7900187b7cf416ae14122f7c0b00dd1a5c20943acd6898757477a -size 29158 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00126.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00126.jpg deleted file mode 100644 index 895d3d6df64e5c64ccb18c5331f227bc1a3f9607..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00126.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:73678641c03d8f31f0927965115cf9dc80bdcce738952c79dbb9ca0184047e1f -size 29157 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00127.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00127.jpg deleted file mode 100644 index beff00f80bac422e1d2b1ed7680e68762b8f3f3b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00127.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:19fcbf8acaa247006f37c84b165a9b65d56ae4f08add2b43feaa8b224e614df4 -size 29097 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00131.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00131.jpg deleted file mode 100644 index 3e10a277d931a6355aedf15aa1eaa1bcb33505b3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00131.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2bf9f40186efbae6f9119257ab9f3116791dd68c81ae71b29c45cf1eb81af481 -size 29279 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00132.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00132.jpg deleted file mode 100644 index fe7c11e6df44247249f4840216d9033d65668176..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00132.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7e43bdaa208704b409325959ece3385f51189dc37869b1fd466f293e6ca87883 -size 29183 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00133.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00133.jpg deleted file mode 100644 index 075648c20f6f8a589e9fd85947e3284359290c63..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00133.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:893a5c36c69d824fda782b7bb9ae83e901ee42405b2b68126e7957ea6dd33c9b -size 29079 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00244.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00244.jpg deleted file mode 100644 index ede0d69a45abe7bfab2ba34101a01ced4ac8f9f3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00244.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b6ab2e41d97f7707c605e0bc83f6afb5adf972ec0fb152d92a1ed7a1dea6f674 -size 29078 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00245.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00245.jpg deleted file mode 100644 index 65b1ed322b879214c6c40a318484f8de07e94fa2..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00245.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4df45ff266ea2c6adab3974c90598339c361c7359f073502049c4cbc57ad00f5 -size 29061 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00250.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00250.jpg deleted file mode 100644 index 52529e25d6c7f35f6c07e13c4c82614a43bbb0c8..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00250.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:85d099a872e6543ce950aeb5ed79d12a9bc32ca74217c1d37a14379cd1f89b9a -size 29184 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00251.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00251.jpg deleted file mode 100644 index 
fc059d3768ccd961fbb1981f88e4db7e6b2a8862..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00251.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6cebc657f76ee4d00e2ae497691764feb5c19413230dd5cdfc5b2305aa90cda5 -size 29087 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00278.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00278.jpg deleted file mode 100644 index 0898490c306fbb02f7b00465ccafca6cf15ef707..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00278.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9f41ea79594f2bcff01122c2b75bb1c9965b1b99ea0b69d9cc3c7f6604fbcde7 -size 29196 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00279.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00279.jpg deleted file mode 100644 index 033b049ecaad9188d8dfb18cd03995af92412248..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00279.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d3cd79ff78be913af8ecd0b61d956492ab8baf60193b91826dd14545001c3138 -size 29054 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00286.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00286.jpg deleted file mode 100644 index 2f34c6ef2b45753a85038fbd2b54f9c53bf3e75d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00286.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:83a2de70b5dbea0a507b807c23b2d6818537e3835ae3ef5aa6389de6cdacd747 -size 29066 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00287.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00287.jpg deleted file mode 100644 index 658946667e245bc6f7f5a471612c4958e306faee..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00287.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5b4c49992b544bcdfc0f68dd00b9a536b48dceaf11f2ca3b3581b7ea53d4b107 -size 29551 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00292.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00292.jpg deleted file mode 100644 index 80c16e0a4778591b4d25d6fe5e5fca95459d8d87..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00292.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a85534aabc01855327bc14f60a92b65c23b5516b3ee846eaa238c73ce99f9a60 -size 29203 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00293.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00293.jpg deleted file mode 100644 index ca734d5f85cdf9b9c23a83a77790ff9038b32602..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00293.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:7807c0154208523757f364829e28081062b9573edfb4c20d7d425035686f9d6d -size 29236 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00318.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00318.jpg deleted file mode 100644 index 7084755b0c7836b82b6906b2c1ab04c0aad2a830..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00318.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ce28499edaa97bf3af422f1af03b862a9fd8de32647f23819093bde7d8d909ee -size 29339 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00319.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00319.jpg deleted file mode 100644 index 90730e64a447165314b6cb422315395b5d4f9003..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00319.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dd148e34084a9fc28ea2da1b1b87e74e96836b118b52cb9dd122f763a3e5594e -size 29237 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00324.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00324.jpg deleted file mode 100644 index 63a2bc502255212c52f9dddfdfa9fe3fc90729cf..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00324.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4497925cf6035660cef14c52d1c79f869f2402d24b2829438b8ea3710fac5b5c -size 29085 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00325.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00325.jpg deleted file mode 100644 index 99cb05d6d2050500c71751f6350716e58b62eee2..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00325.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a5d04cd1f003db275619c242e75d00c6fcc959bb684ab60db014e79519a5ad2b -size 29094 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00330.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00330.jpg deleted file mode 100644 index 1a298b5e7b8f1e2358915361e37883314765e756..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00330.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:564cd95908fa24c47a94353762474f91e151a9ffdd75f2df3d4eb8ab6dd9772c -size 29165 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00331.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00331.jpg deleted file mode 100644 index 6ae0eaed477feb7bb8a3dfebadfed15033ef7e93..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00331.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:42a8352c511432fc781aba2d044ba56aa9e25a2f655506af00790689ab3c9cb2 -size 29229 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00442.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00442.jpg deleted file mode 100644 index af92b8752e07fa747a520c2dbd6c514ff5dc7c03..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00442.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:287781d35bf1e1264bd860e1fefe34c55ff898d9954ba664f91412691a40fe7d -size 28987 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00443.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00443.jpg deleted file mode 100644 index 051bd2b5105df14d663ea7c9b9aa4cc6707ba0c0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00443.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:18603179ea1627485c3e61e5fcd450c20d1c64ef235b77477b16231a5a5d4ef8 -size 29069 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00456.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00456.jpg deleted file mode 100644 index 70564a05928dc5a4d916dc9777357f07b745dc84..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00456.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6ab064eda054be93e96194aff53e279371dcea3a1e1ca6e56c1bb68ae7ad27be -size 29208 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00457.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00457.jpg deleted file mode 100644 index 6fcd819e396c9e2d6a27b3b61623a3a1fa50df9c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00457.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d206c71892bfbc39d7c258711d474285c9c3c7df2f62f127b266df52159111b9 -size 29082 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00480.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00480.jpg deleted file mode 100644 index 28e531d2d0d6c90e247e29ed24a32d61e58b27db..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00480.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5159d75debf373d856481883f2f32a71805c169cd68c4c74f39b5980aff8de88 -size 29001 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00481.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00481.jpg deleted file mode 100644 index 1f5265ae5d8dc7eee20ec0ce65c6363ca1f7eccc..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00481.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:369668728b8ec6fe3da50d28262b6e0c7558eabc6494de2a842faf8ff7335c4e -size 29009 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00494.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00494.jpg deleted file mode 100644 index 
cd9fcf44f3de49659e6749a8e480c122a885bddd..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00494.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6b59c730f502d0c9c01831dc3ebb23849223b4cd9d980905800c1d7ccfaa8a19 -size 28971 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00495.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00495.jpg deleted file mode 100644 index 143d4a6b8a04e22652cd90f69efe689f48f242c5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00495.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0504b4d7ff6f893ca6fd5553b0e14fd9a8694ce94e3724e5feef5479733c5e67 -size 29404 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00522.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00522.jpg deleted file mode 100644 index fb78e2601f7300ef52f7041a96ff27aae92a0240..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00522.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fbf6d0b7f186bf24b623c97adc6c2c761415dfa5dac83cd2fb493a92a6cd0916 -size 28758 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00523.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00523.jpg deleted file mode 100644 index 56df13d02160e2e9e74a1c217eca4cb487b6604f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00523.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:09e3219942af026ca648072ca01b8cfee08f991cc5aedc0185a33947b13ca99c -size 29328 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00536.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00536.jpg deleted file mode 100644 index 48c4faa7570fcd521b10f3aeaa6dbaa4afc6caf9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00536.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ce2f08e30454630b9e4934c79db2ad2d7981b8d70cebbd58e8885fe390c94754 -size 28946 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00537.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00537.jpg deleted file mode 100644 index eecf96c41ae236f466c70188108e742165913419..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00537.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8970b98ab8f5405a6db7f9f80e4f50ef8a03f84386da2a5863c3cdc484a4e672 -size 28924 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00640.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00640.jpg deleted file mode 100644 index 7081c09cb29aa5a4b090625efbbb0b4945794efa..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00640.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:0ee54463d34a3623960cedd011612aa1bc0985ecb53c39c913442b566a9377fa -size 29186 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00641.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00641.jpg deleted file mode 100644 index a5b0144293bfd4a186d16bb24359ff2ab367bd04..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00641.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d85a281cf69c105500a2adc34b6cf9223407793a4c74f84267c39e5f54660c59 -size 28762 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00643.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00643.jpg deleted file mode 100644 index bb3bcc072aa455169d1c321f0977b6972757e332..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00643.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3aa72e153097313479175fc2cfdc86fcf015f507b83e23c85d5c9f34966a7b61 -size 28688 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00654.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00654.jpg deleted file mode 100644 index 9717693c2091ea736f72b890c50f0099853553c0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00654.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5014079cecef69ea7b4fcdbe32e017dfb3c6ed104b5df5c8864d18efe5d2ee17 -size 29218 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00655.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00655.jpg deleted file mode 100644 index 7aecfabb8ff0d21c965cd272bded7618a9feb94a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00655.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d50b4940da2f5b9640d59a681408f11507f7c1064c9ef0b966e603c0dcb35263 -size 29181 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00657.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00657.jpg deleted file mode 100644 index f35f12b0d2102f63968a9bb22e4ae9394881244c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00657.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ae1387ecee9cd6f4a57782c55acc56dd49334645bd1a6f940c114fb4de0ea05d -size 29122 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00668.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00668.jpg deleted file mode 100644 index ea69da98c78623a314f7d5606677c9f4f283fdef..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00668.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0b48ce3f16419d6f04c2074eecbce32d981c8551f1c7127f371be079004c350d -size 29085 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00669.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00669.jpg deleted file mode 100644 index 09abb7de2f159281f95c9e605994db619cf7e776..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00669.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8ebb493e746959610b3c375d91c7aeea8fbae5dcb4025e63242843e89e4f1863 -size 29201 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00682.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00682.jpg deleted file mode 100644 index a74bd389d47fd7c259a5badb9ab29ca96313470f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00682.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d2036ac43c4835431de87bbec9db5eb28ca0440d395469ecd93341e13891ff85 -size 29394 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00683.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00683.jpg deleted file mode 100644 index f13e194675f62da181f6bec0cf11a1f5380f5cfe..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00683.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a76ef4d4535211c86b474a156faa779da4b671f0289bc597c877fcaa8d3efde5 -size 29340 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00696.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00696.jpg deleted file mode 100644 index 5dcfc8763da44de2a7aed6f4b9072f91790ac326..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00696.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b5f58468e6f0bdee487b33f36f81235e31a4eb95d98ee860a8a18059912ae489 -size 29101 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00697.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00697.jpg deleted file mode 100644 index d60fb0b52639b2e678a081bfeb64738edc540243..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00697.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f064780d7e0272b7ccf8fa7807a62ce268aedc70ae79234426a485c6207a07ed -size 29058 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00708.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00708.jpg deleted file mode 100644 index 6c347b46a99db0163b1aea6745ecf9a44a733a77..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00708.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:92b74a3ba18d5ead65e783d1958b5f2eba999bbdeed2a164bce7de90fa894134 -size 29033 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00709.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00709.jpg deleted file mode 100644 index 
652f30949475656a6996a249fc59f4cd471ea788..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00709.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9f1e8a8553e0ffcffe41d15cb7e850158dad7fd544c0ef34913c2727e96f88cc -size 29087 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00720.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00720.jpg deleted file mode 100644 index f0c2ce58a216bb931757b26409234c4f599d8be2..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00720.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0928f79cb58f386336e56a6b798589b362c928223a30ab724f78fadee10596fa -size 29110 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00721.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00721.jpg deleted file mode 100644 index 347079d2bf8977539068db38e3cf678332864205..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00721.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:457cfb54ee6b8d71265b7896e37c3f7018a6a5aa84fba0a9fb91338f2fa95880 -size 29486 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00734.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00734.jpg deleted file mode 100644 index d14fb973c084be456ca88fa3f9b6e9047f56a429..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00734.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a747ee393daa0ea0b0219745e44af84ab3eb8be251007891a15e8ca5485f51a8 -size 29219 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00735.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00735.jpg deleted file mode 100644 index 36d4d77eabc2dee09b21f31bff44593f3eb2c162..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00735.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3e1924c86d98c358955576e749279f6e1dfa9444274be76857bf24235466580a -size 29141 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00858.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00858.jpg deleted file mode 100644 index 9fbc8e8f4bd7e8739a6aa72f85f50024b04ffa30..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00858.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ab843e0953d5bc7f5abd35adfffb4eb0db33a8d2f9617f1bddf7c9d1f7ac3191 -size 29147 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00866.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00866.jpg deleted file mode 100644 index bfab33fea2f23560c0995f4a3362ee950922d6da..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00866.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:fadad518bfc59c5f9b97fc5cd923f41469d44fbd7f5253adbdaea0fb96d7a9b2 -size 29134 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00867.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00867.jpg deleted file mode 100644 index 41dd844f29f244860db593582cec3b8671cdc3e6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00867.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0b3858a51b32df3c7ba0c955e13041b88eddf2413c41911e14aeaf94be8816b9 -size 29094 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00872.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00872.jpg deleted file mode 100644 index 82375c1fd7f2c7930bd68c804aa5e276b2a4b598..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00872.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:83f413f724ff0e616c3bb661128f28ab6aeb25cd78aef7deb0c4103e070fd3a7 -size 29172 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00873.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00873.jpg deleted file mode 100644 index 213460552694bd87b6ce96b5ca5149756bbdf164..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00873.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:29ea345fc9bf79a4eaf3e70dcf8e8fd16eaba90b8616b85764b7c7071b17bbb5 -size 29183 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00898.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00898.jpg deleted file mode 100644 index a399f01f738bfdfc803fc3fc2d47064261850998..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00898.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:56dcd0673e3efe022d8252f2ad9c17d53c6faeacc040e0b2fa6f6b3f76d51c89 -size 28591 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00899.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00899.jpg deleted file mode 100644 index 32c44978ee123a71861ba21a1e02128ae789014a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00899.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e40e9c6128c6b9da2a852d6502e9f3b867f02234de0d5736bbfbb2ae0cd13892 -size 28547 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00906.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00906.jpg deleted file mode 100644 index 600b0093f4e496c277333f400eafa3401f4cda21..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00906.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d3112c5a1ce5b1cc619abfc6552f96456f27c22fd04c62544c58e1fc338c2adb -size 28829 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00907.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00907.jpg deleted file mode 100644 index 5a33292e3881558baf9f5a0c38e6df66b26ad9a1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00907.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7a19c6dd2ffcbbd14b2ae9dd50a084e5f1594519c814f4af28c3cb14c116f42c -size 29608 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00912.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00912.jpg deleted file mode 100644 index 495d249cdcc5d070045050d9f4c630d35bed852d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00912.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:639258bcb9ee72b6d9dd74126eca64934dd1cf745ee0c7943e5d54641876c26c -size 29074 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00913.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00913.jpg deleted file mode 100644 index 84d0bf690786722e97328e2a125ac679a3c2b541..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/00913.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e5786f96f75b2d093173253d77dee98a83f28efc12bf400e68063b7b8928ec53 -size 29039 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01006.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01006.jpg deleted file mode 100644 index 398f971f0a943c8c73b979f41a5aa2d5a672011f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01006.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c93a475a01d7973eb32599ed3f079eb394431d4a8a88bbe84e8f226be0061985 -size 29134 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01007.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01007.jpg deleted file mode 100644 index b04a97bba3a996573af8a3ca1c40856d3e586725..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01007.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:174f191abb42e1922e5ea31ef940943dc7ce98c540c5a727f63a23bca58b6800 -size 29511 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01012.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01012.jpg deleted file mode 100644 index 8bfa46a6a2a7a5a997f7165b12f8ee5eb6e6f44e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01012.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7c3a286617374f967c65c8e362767852c527a67e6260b94a6d310d13d80d00e7 -size 29226 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01013.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01013.jpg deleted file mode 100644 index 
0fdbb625ad91be46663b80fb63a73160937d0ba0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01013.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dcca1276471c13d2cd26f621fe7683b3fd44436f0e2279d890b9f571345a0794 -size 29213 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01166.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01166.jpg deleted file mode 100644 index 892ad35a22de1eac5adc1d1f81e4c1197395a5e7..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01166.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:62ab825ab7095da142a622b7f478075e642f30f58d5ed94e7e54b9d461f93156 -size 29261 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01167.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01167.jpg deleted file mode 100644 index 8aef1b0a5eb110addce9f8ff9fce16294180e727..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01167.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ca4a833b8fdceb6d10d15953ddb4256a28673ce103ed7ce9b914e0817616e554 -size 29274 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01172.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01172.jpg deleted file mode 100644 index c031a5e0117aa155f09b2ec0e1267de8f4efcc44..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01172.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d2368eb2b34c8f0ea8123a8b97e65182681b967dbff3abbecd33a68d1a81586b -size 29123 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01173.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01173.jpg deleted file mode 100644 index c1deb0ef54f99c59264d316b6260749d95c8b080..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01173.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:678e766907f79c042eee15cc512ecc422b1ba052eee3dfa2852a74bbc449a017 -size 29425 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01198.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01198.jpg deleted file mode 100644 index a97b4ca7c4785f92fa941683672908a9cb81bb8b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01198.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:768b2599224d61d2f572265a5b1d52aac1adcda9d8ed511a214bcec18f09a2ec -size 29165 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01199.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01199.jpg deleted file mode 100644 index 02ff8af9f855299d9cb3b3b3d4df1175ddbd17f9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01199.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:caf2f6f32d39c578bc58a6bfec146c7b7f979f7b3799d860eb44174d7d3a6ec6 -size 29134 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01204.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01204.jpg deleted file mode 100644 index 3876f8114e1c7702c1fc17b6ee3e0de711e31212..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01204.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4bbdaa5474345af89ea2542c13d055ada1282fe65694c8ae9f77a56bd43a064d -size 29359 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01205.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01205.jpg deleted file mode 100644 index 1acd03fd4dab0a7b0a1f22236d1580f360bcefa1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01205.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b3409bbd25ef96a80de8b0981d80af9ffa1bed2a49cd2ab1dd56c6d02b56cb05 -size 29288 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01207.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01207.jpg deleted file mode 100644 index c47c7281a85ae8116ea6251e9c0f5965bfe685df..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01207.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bb89679b5b6b6f1149dd1d5504d08836cf42a6e635dc97d9b2ce5fec434924ef -size 29181 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01210.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01210.jpg deleted file mode 100644 index e13d27339046b709127856703714ef915c510be3..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01210.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d5b3762d9f902835e311c9a8d2f6e2167db917c6f646c3b7d5212138533bcb0e -size 29093 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01211.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01211.jpg deleted file mode 100644 index 090455ee3309f2c5011fa98501c1b23d370b7861..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01211.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:746a4010130c71800d315f6f4af0337b719ab43f848bd2f71ffc7188ed406be6 -size 29080 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01213.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01213.jpg deleted file mode 100644 index d15d5da64ecfbefef2a9a6f76f7f799c8f484901..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01213.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:16f821c28efa006512f7f946b46d2a275920f1eafb335c1e27519e0acd370964 -size 29460 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01238.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01238.jpg deleted file mode 100644 index 49e7fb57b710887d6eeab2091c933358c7a7e3d5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01238.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:51d7f82c851c1c01503c0e6a26e6ec70ea0fddba8bab4aecddcfe4fad84423a5 -size 29179 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01239.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01239.jpg deleted file mode 100644 index a3e5925f3bfaeec442836243b0c81fae630d2bf6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01239.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:886932d43030fc851f7ef9ed03f7372d6ed2c87c20574ef239629f7fe684b267 -size 29228 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01358.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01358.jpg deleted file mode 100644 index 5ae74ef8c77e6b338150ca96cd14d153da367000..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01358.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:569839493843b675976b43ca45cdf46edec4e1b299252c8c0b181d9511c75f5f -size 29055 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01359.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01359.jpg deleted file mode 100644 index 2ca8244b34db8929e90ae450a5546f48651b7dbf..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01359.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:56050a455c133724018a5e1368ea158417220f7918b08d73891f0b1d4e9a381a -size 29090 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01364.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01364.jpg deleted file mode 100644 index 40cd3791a23b0aa9874c5e6c7d6694444fe9f8b1..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01364.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:22d5be070468c45684bd42c6c3a6b04e767c7bf90c211b16d07a2fc05f292444 -size 29188 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01365.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01365.jpg deleted file mode 100644 index 0ec7401402f0411399abd2d08283e6c28d22dd3b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01365.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e3e2b98236771e634d573ad7ab8c70dd51d33599cb66e7e82fce80a7bc2f7099 -size 29149 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01370.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01370.jpg deleted file mode 100644 index 
6da258c3e771676d215838d16a37b4b8f3832c09..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01370.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:08ae7dc4aea19c071a8fb036bdf5266f92ee457da01945ace22969289ef9200f -size 29094 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01371.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01371.jpg deleted file mode 100644 index 30364b92066f8d9e9c138d9a8317384b1c42c832..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01371.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3e0af68f6f003b6849a6257454523c64677a5576c2c950357e64351c2d8726a2 -size 29445 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01402.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01402.jpg deleted file mode 100644 index 6acecdb5ea013348d9ed56f025844bdca59bcde5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01402.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c801041cb8c59af19f19ce31587f19c1ca60bc8cba0d36df04ebd441d5fe4e86 -size 29304 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01403.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01403.jpg deleted file mode 100644 index 6990e1c10e72aaab6a09df1117c1086a3e7ce4bd..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01403.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bb61c290531ccc19b418b23908bbb0a616bb0af8d442a4492293518d8dff3932 -size 29197 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01416.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01416.jpg deleted file mode 100644 index 3c54fae0b2d2494689f8213be8ece878b8a3f1fb..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01416.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6667e742d800726a46fb43f4deacdde88b202540ab3a89b9a16f18e80bc7858b -size 28729 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01417.jpg b/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01417.jpg deleted file mode 100644 index 4647b68a3658bcbdd2ac49029fe45c43bbc3ceae..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_golf_wear_nodded_cut/frames/01417.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bbce73233439232b72163648ff81ad5daa80c7be93048a809dc877694cad0798 -size 28725 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00000.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00000.webp deleted file mode 100644 index 86d6d6bec08e1e21dc7992e26d03de64130dd40b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00000.webp +++ /dev/null @@ -1,3 
+0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f9987ed0263229eb63157fcd70f904400878ace30732c5d3245b67950ee5777e -size 598616 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00016.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00016.webp deleted file mode 100644 index 8f51477257e3d6c552b42b0c222318deaab45435..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00016.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0215fa562145bc2278f63521596d9b913dfd6256d098fcd13847b8e1c622945a -size 639754 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00041.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00041.webp deleted file mode 100644 index 1539537fdf490a255a356eb33bc03b5834ede543..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00041.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a47e48159cb27393a9f5e9998aba8d7550fb3ce2ab0a749d099d79b4504755bd -size 624440 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00057.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00057.webp deleted file mode 100644 index fe63e19326293055795f63cd4563e0dcb7d4b07c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00057.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9e94763e7f0e9cd42d967f06b86843a3e40d05a882d6fbce66369779aa9b4758 -size 626164 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00082.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00082.webp deleted file mode 100644 index 7061c59133ae15a34897cbb1f8d69bc87117218b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00082.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:322b84c5889f7a1686ba70cedeb930348c5455939e8ad49242efd076ce58b675 -size 621842 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00094.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00094.webp deleted file mode 100644 index 7e0085e3e8e22446b30b8f3f6ef34ef3542c8b6d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00094.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5fa9786feeaf28aca675de418e830c1d76d2a60e50f1752c0a4263c527392604 -size 621970 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00104.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00104.webp deleted file mode 100644 index 41d1d15e2d43aad3db0d71fd884c61c5d1c60fbb..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00104.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:fe0bff1f9d380daed232cab9530b1f405c40e201d187a8533fdaf1a2cca54bae -size 646230 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00112.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00112.webp deleted file mode 100644 index f6f76ef9bb0924a1f541b7faa6148706d1d5dc99..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00112.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:32d48192863845aa4ba0a7f59bb49fbc4d50b80416d0893c47e53d5d7252f090 -size 645298 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00128.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00128.webp deleted file mode 100644 index 1b7cae8bc527539b2e17d4bf39017b1b0e191d2d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00128.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d107bda52847c551de739ac5c8fe488339cf92657bfbd8f4d4b1f7be5593177c -size 614508 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00145.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00145.webp deleted file mode 100644 index ca022594bf698cf34d668b9e1be71daf2b71c398..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00145.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6a7c796079f3379547d29959c360ff77c9eab09c8cfe70e6b9a099c3755d2ea0 -size 623708 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00153.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00153.webp deleted file mode 100644 index 1b73a00dec44ee464e8831ff1a0099096420de53..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00153.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:55365f3fb1875c896f028598a238bc8055fcb20c9b1a7fe61a76d554dbfd670d -size 625842 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00169.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00169.webp deleted file mode 100644 index ffd16c656219f393db1feea8756191d9d35fad9c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00169.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3f8c5df31cff94351bd8e7c81987d36d990dcc0163c6748ccf118718ba613e66 -size 622166 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00186.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00186.webp deleted file mode 100644 index c6b6219fb040031037530c960f7b056d4c67f044..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00186.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f596655ce3e44cc5192e26d5e2fe4e9650746664999209852475142db0d51d26 -size 620386 
diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00190.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00190.webp deleted file mode 100644 index a493a741ca60c9039f6875c3ebf808e2d84ef066..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00190.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b13bd13f7ac02b5604281c48767ac1e865163bf892e0ec526b69a04c4d08e74d -size 618932 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00201.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00201.webp deleted file mode 100644 index e29ea7966486c4b8885eea5da1511daedc29948a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00201.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cc67ae1aacbc01c2a9f8282beb77e4b5251466f12d2dd30178961b36dd0c6722 -size 628158 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00217.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00217.webp deleted file mode 100644 index 7055aed1a4c05c473ffc0cba42f82445fabf4888..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00217.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1911b6176d0c8b49a23e0ae806306ea4ae14d63b4a43b0f698794fabb0a0117a -size 635632 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00240.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00240.webp deleted file mode 100644 index 73841725c58fa45bd93558587039e68059437288..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00240.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5facc6b254fd23746ce9cf2e1c26fe959ed1d5d71dd2dac556c79b2211e34f94 -size 646712 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00256.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00256.webp deleted file mode 100644 index 861d12d0e3fc9d824c495bd964c69bdd0b9cab82..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00256.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e37ee9e5f3ee44a9479d1cbe4b73b6bec1780d5eefc1d85e0ef53c6ed6cf080c -size 579382 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00283.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00283.webp deleted file mode 100644 index cd1a57b02b3e2c1fb663e58ef13389e7845a0dd9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00283.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7831796ccf144be854ab318bd6babd1ce73de31e6179a877708e2d560a67c6fb -size 621442 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00295.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00295.webp deleted file mode 100644 index ac3241c25a903b8d3c67effe64bd3654c48ef17e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00295.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:41ca6f709025da7844fc43a7875e16e7235be4a3248f01e070fce99b17a1ef69 -size 617596 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00305.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00305.webp deleted file mode 100644 index 8484c58e23ee7f27f1cec4009f41318ee39b0282..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00305.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:02ba079064328de83b1d4a431fe07c2ee85aa327da23acd3209258dbc5382d28 -size 633234 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00313.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00313.webp deleted file mode 100644 index a47a8f7b55236b918f76112741225a567f7c8986..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00313.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ebfb8544ec02edb16b005e5462e12643c5f5e984ff1bb7bcb89eb992dbc2bb46 -size 631226 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00329.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00329.webp deleted file mode 100644 index 0323b28bcc6ce315eaed56e2f58be82cb6939c78..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00329.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:089d493030cc7d7b187dda7387708553ea3258eb5e6a6a4604f48fd143a8d652 -size 622658 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00344.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00344.webp deleted file mode 100644 index 23d7eb7988b131842c2e67e10bb5bc98bf09fd12..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00344.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1bb5b3f58178bdce445a6b13bdb2d9809c806e203050420e67bdb2e6b813ac31 -size 643430 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00352.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00352.webp deleted file mode 100644 index fb826bc319db23c05436e8c43be15cddbc0a5a8e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00352.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e7182222d022ab9a4cb46aa414c385479b87e97f882e463515f76cf46cffea99 -size 643082 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00368.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00368.webp deleted file mode 100644 index 90a0d3eefe571ec624fe78d152adcc5769cecf6c..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00368.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5b052c6ceba253fe94c3680dd9ae9519e2fa1d1f061c3cfbd8f2797df51e51a4 -size 642748 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00387.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00387.webp deleted file mode 100644 index c30ebe5e0ff955f35e774a4d5cf2d594371be038..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00387.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b31e17fd22b1ff33e0fda3fd7c95c6e2c000fb310769991e4896c0a73acc8d4a -size 606030 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00391.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00391.webp deleted file mode 100644 index 41861c6c09f77de7d56e5fe09b88070fd17e99a9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00391.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d21a6faa07d51792b2de2856edc770a2508d15b71c93500b47220417a4501234 -size 611562 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00407.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00407.webp deleted file mode 100644 index fc11ba29b456e2ee5209c65668564cb9b401af7a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00407.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:88df0c3cd29b63a38f8aeff3307841755a3afedcf304a7394ed7d59e8f19d130 -size 616866 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00411.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00411.webp deleted file mode 100644 index 5a912cc48a0d75560beee7226f9e63b38fd92824..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00411.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b4a4e5ad087c72131fc3d348c325032160304b40749f2e5fa457356be320a5f5 -size 614660 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00446.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00446.webp deleted file mode 100644 index 6d107e2ff482287c688ad6d7c272cc6ec0a89536..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00446.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f794f835e8a87f65110fd5ee6dc66d90e83731e862e9dbeccb501def1f967013 -size 622592 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00450.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00450.webp deleted file mode 100644 index 585b7bd64ec6fc829899872054acb831afbbecbd..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00450.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bba5a8c85fe5468a265a40251c0b9be8ae3ffd6c6c69be958eb6826c06460ff2 -size 623886 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00485.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00485.webp deleted file mode 100644 index 94ee90cb21d6bbdfff6b051b1f4e72b29b17fa86..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00485.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2670b42b24b6a04cb6d73a849413928f8d72dc9ae6be2d2f10de2ec936f5d07e -size 622572 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00493.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00493.webp deleted file mode 100644 index 8d7444efaa2bf6b10e4086f23cd9b995983b171f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00493.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0086d1a333e0e3f27edf184e2c404abfa2187d03ec43d9abb10940121a434a6e -size 616636 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00503.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00503.webp deleted file mode 100644 index b2fbc84f1ef5393990f2dac18c0be3247dbc8ab5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00503.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:86d7581f75ebb1197285821de05a6860949969dfe3a02975089afe0f99659366 -size 615058 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00515.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00515.webp deleted file mode 100644 index e53b589b994d20f7396280fa881a733ec032a9a6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00515.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:16987a4d641abc10cd2f15d7a1b953f8ac4fddbb7f7230ae0c46234ddf67246b -size 606196 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00539.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00539.webp deleted file mode 100644 index ea0c68deb98126d72194e470c7a846139ef820ad..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00539.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7fe730f2252c6e711cf6f63b493778506ddae7204878b9d82345b677a52eeb3c -size 615282 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00542.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00542.webp deleted file mode 100644 index bd682fddaaad3ffab254d186bc6bd664efbbb691..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00542.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f2595fa416121159e6e807a0253750124f57778df4b1f0329e6ee630d4df2ce1 -size 644488 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00554.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00554.webp deleted file mode 100644 index 3e3687babc391baabcfbebfe0025fc27f121b35d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00554.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a7d7c11aab26d38346efe9a6c370255d468cf59f219b3cca67a4dd9af04fcd5d -size 623804 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00578.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00578.webp deleted file mode 100644 index 10c1a71a08299c2a977dfddf6b192e62bc5f3c24..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00578.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:10676f3359a4a744f89bbc30b91748893db60272ba7c0206610a0471173bb474 -size 628370 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00581.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00581.webp deleted file mode 100644 index 6fc958cbebcad5488e0868a7698e207298dfd775..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00581.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:079703354f3b5e9f301f85b8f98747369060a5831402bd8a9535dd2fcf91fdf7 -size 619636 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00597.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00597.webp deleted file mode 100644 index 7f36f938955f71f5aaf05f6e6d49104640fe1457..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00597.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e9f8dd9f6c6931a71137c4951bbfbeb8b78cb793ad7862c0ef64e6a9ee0bf339 -size 625570 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00606.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00606.webp deleted file mode 100644 index 13e1d3673da15803d2c4394716f1277dbea16230..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00606.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:976571c9c589995a7c25b37af4173539686f11ffe577101f3fa27b3ba235bd5f -size 623640 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00610.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00610.webp deleted file mode 100644 index 441340aebf10d31cb98d234313b89400a3c0e122..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00610.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:73ff9419399f74e98b9ac9820b2f06b57ca4bb67b4446e4381bcb56239851336 -size 627946 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00647.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00647.webp deleted file mode 100644 index e7eee362ce98d1138252d67766d51b71c9743718..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00647.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4bbc7386e7c3196555c64e6144acce4d009f41f9ea2c4dc10df1e116ea290c88 -size 611860 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00651.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00651.webp deleted file mode 100644 index e3c4800b7cbf139b11ffc751c9ee249fab28bfa0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00651.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6557e3dd2884a1ddefe578751ab36b6eda051b9971cd156cfbc30a59be30ef14 -size 624550 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00684.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00684.webp deleted file mode 100644 index 4232558ea15a736f6f772654bd5a6dfed759ed5d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00684.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d4b3913eea7e6b3d6275546994efdf3f6213d2aefc005d057c1db6a847e525e2 -size 630932 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00692.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00692.webp deleted file mode 100644 index 1cfd732c17f36918a54c18970f1715c26c04a0d9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00692.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e6212d4ec2b3d4baf468ed7564fbd1bd2ba0c608cd845deec0b78d4739aabb48 -size 632944 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00702.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00702.webp deleted file mode 100644 index 650d21e6ccbebf53f013f3988aed5d1a274177f6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00702.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8c6a91cd323a86fee40039788f227e482d4b465196d4739c3d921aea71d40f99 -size 623480 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00714.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00714.webp deleted file mode 100644 index 1ad59e7f46557c7683b0eac9164613c6f04f29ff..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00714.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5eba9d84066418dbef7591e0512c3e0dcd479f714617e00ae66ea6d468f1c555 -size 626378 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00738.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00738.webp deleted file mode 100644 index 257da40225ef0d0449661f55413f23b889b0727e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00738.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4b1c25adaaa2a807a850bf22591f64a904327a37a713ae8d5375690952fc486d -size 628066 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00743.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00743.webp deleted file mode 100644 index 5bb907af518f416a5e77fd4f254256aeb40a908a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00743.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b287386092727f1524b36656a90529d8ae10047e6a5491827a5e4aa347a38461 -size 623292 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00755.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00755.webp deleted file mode 100644 index 37d2dcac52dc44db87f5085e89e31c418f75dfd0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00755.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:72de933c4761dd36b36de4e270d790a6849520343b513735e26b3c8a9cb67fc6 -size 620166 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00779.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00779.webp deleted file mode 100644 index 59d7e92e0e642ce5b6513886151f1a6db861b426..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00779.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:91813b477684f02ad6ec1b47a6b2f473a420b9f79e562f78720e337ad014fec5 -size 622594 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00780.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00780.webp deleted file mode 100644 index e51b1a3c73622b3287f6773c541c3239185e6c63..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00780.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2941b8fade51ab946a203f88cdd291662fc247aa071560422e216e4f8ab28456 -size 625404 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00796.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00796.webp deleted file mode 100644 index acf2613e5348ceb73c02648f1439b4eaa4264ed7..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00796.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4812d377035c46f95b3ec8a0b63d9d5d812cb5e224e2da5af9d1e758cb83e49a -size 626510 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00807.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00807.webp deleted file mode 100644 index df92e71fb9f0271f6e838fbbae9df0908a8f44a4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00807.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ccbec83a9e4d04495273118a2d5de3e3676571a502b8dbba2f44b47370907aa5 -size 614596 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00811.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00811.webp deleted file mode 100644 index f4ecd9fed547f6d56ad2532c0412bff768341c5e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00811.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:356ab9f9d78db9245dfc6d3f7176f74fd15595fc621faf78b40b6c55ba8fd635 -size 618164 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00846.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00846.webp deleted file mode 100644 index 9d77da439b4e492da47dc9f64932f50539f147a5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00846.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cc660fa019908dea3b45488bb0ff0c2979b6d397da6f7ae9e4f88354a50880c0 -size 617804 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00850.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00850.webp deleted file mode 100644 index 8bebe35f4c985ae1d1309a5c31d4c8f5ff0644a4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00850.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:84fc6ab27e7a3b57316a15821c945874c0707189464111957cb5f850533e8320 -size 622078 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00885.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00885.webp deleted file mode 100644 index 0323e6ccd66fc9c970c026847243f84d2a07fbaa..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00885.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:dd41c5dbd2a4551974ce636c3dfb9b3e7368b78aacdc3a679eb834c99624215e -size 622024 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00893.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00893.webp deleted file mode 100644 index 774cad11cdcfc5d01a94f58385db961b668bf334..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00893.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0f8166f009890735a1bdf013d2ac633fbddeadbf34953a72cd04a42caa440692 -size 626068 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00903.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00903.webp deleted file mode 100644 index a22525c323042a2a1f819db2a7945ffc8eb1df9b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00903.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6667f18e985c9afaa96374156ebdbaad92d6b2e3982549b7f3e2e7634a1f1e1d -size 611034 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00915.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00915.webp deleted file mode 100644 index 7b4fba7028240907be02775f0ecd24cb098fab38..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00915.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8ffc14be6fef3162e20ac59872772cef4722e8099a4fbcb9b92c8dda1319a5ee -size 617136 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00939.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00939.webp deleted file mode 100644 index 3a2010c1eff15b37368cbc160dddcdd24eadd0f9..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00939.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:c3aecfe19ac67b9aee1270d00276d50882cccb2f9fe69bf99cc2a620480af562 -size 619450 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00942.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00942.webp deleted file mode 100644 index 252f3ac2fd5af7cf3f778c930cebbd5bbaed9e79..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00942.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:681f707535d95c719eecc089a0911fb2b5e71f45aaddf0b4c7eb4d4f907f6394 -size 619116 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00954.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00954.webp deleted file mode 100644 index 7d83b4ef45f2cbf0450b585bba4856ac7c53e346..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00954.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5cbe7a9c019eff5e58f041de55dbb60f8b4e002938fca505e1de711d8cbe9fa6 -size 619716 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00978.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00978.webp deleted file mode 100644 index 663e767997af06c57da5bb2c3d3ff7f8b23ee720..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00978.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7e27e7fec799acb9cac99616bdc139095e80f4a8a775baf1137bf63bceb94d84 -size 621912 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00981.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00981.webp deleted file mode 100644 index d7d7374736e0886b3bd0269efbf4ed23104a128b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00981.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:65d64db9b8a783f1d5d40d66ae34686802dcbd65442afd2b77e3cf6105382456 -size 615484 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00997.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00997.webp deleted file mode 100644 index f41f591bb9a2b4bec6553b8f47ed5151ca4d3c4d..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/00997.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ff183ea6bac9351145ce46a5063caca65abced37ed212dd2acdc46d9317b7aac -size 628412 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01001.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01001.webp deleted file mode 100644 index d0fe2a38fd249d835a6b1c4f49d303710c043779..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01001.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5b28bf73ac250f8eb67895e866f35aaaa6d8184ab7f2a06a81681a342f1eb6b8 -size 626348 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01017.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01017.webp deleted file mode 100644 index 6ca22319e372956f179262e63cf8aae4471f7b93..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01017.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:767058b512b3978470d3ff51e510df8d7960c1c9c41c3b3fe6d74378e2e13b44 -size 629698 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01040.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01040.webp deleted file mode 100644 index 56eb47f41a3aca9574bf89ebb8237101283a3c06..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01040.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d2a48f4e848e9a18fc8b164c97572b2dd27c76a8a0f4585c7d6c775e039a8569 -size 642476 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01056.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01056.webp deleted file mode 100644 index 07491d5dca89dd64b9fb43cfa0a558b6c610ca83..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01056.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:26108611ae3d8ecc9c4e9044e3c8fe4cb7e3a5f1676f91f94f415816136a9014 -size 641404 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01083.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01083.webp deleted file mode 100644 index 80d66303c467e9823d18f2a898371353c151b0fa..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01083.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3b85a97a28c47293932d925ec3a90dd1e5d369999a6cf381e23ab750a41d8a51 -size 615728 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01095.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01095.webp deleted file mode 100644 index 9c4ff30066e1c612a08b85e3bdac15cf9cb762e0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01095.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:61e6e12beefcbdaab3e5594599bf0d675c3f6bb703a5c1395d60a58ce073d820 -size 624698 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01105.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01105.webp deleted file mode 100644 index 234c599b1454d3a767823dcb537cf3bd6e0a3126..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01105.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3caaa6f85be7f17fcf3e48daa1097a23f378e39b0aff932ff661930aa8ae0ed6 -size 632374 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01113.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01113.webp deleted file mode 100644 index 02c094bdef4d9f267209a76a9c71a69966c2a152..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01113.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2ba599dc66cf804bfe12564a8620fbc469e1c28d8e83a7e9fed2048cdba782d4 -size 636920 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01129.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01129.webp deleted file mode 100644 index 224b568447687c90516c097743b45681e53ff137..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01129.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fbd0d2aeb736eeea3fd676e308c85ee76f2c563f6f03037ef6cf2e09a94af954 -size 632112 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01144.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01144.webp deleted file mode 100644 index 322fc023c2206adc511b5b31b43c394e592dcdd2..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01144.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6c03ac4a4a47d67823019e1d56ea8cdc99093472eff72060fa4840fc33d1a5a5 -size 648546 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01152.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01152.webp deleted file mode 100644 index 9f123e0661a03923dd1c624aee1b5b95425e551e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01152.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:3e9d36cd8b01f8f7ea5076ca6e144e9cd317640ca5248819b7c430676282ac89 -size 577552 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01168.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01168.webp deleted file mode 100644 index d2ebff44bf77e8406c86d4fd7cc38446093c2a90..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01168.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:10d4ab0f4a8c2791e79ad99cc81f0607e1630749e055634ac7f4abc75c8281ba -size 636622 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01187.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01187.webp deleted file mode 100644 index bbc8eb083944e99577ba9258da1582b548102d41..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01187.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8d177b340a94ddff78c70cb7113e38636d4bf896c405297285e0a03bf55cfdca -size 619856 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01191.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01191.webp deleted file mode 100644 index fb15a9ae2ec88dcf2149411efdbfb99a761f2df6..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01191.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:34081e5e69ed5f1d33d7660b9af2f2c1e5c40ca4de2b8398b8e76fc291efc7ff -size 617328 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01200.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01200.webp deleted file mode 100644 index 1f25ec9394ae6c7c1c8d4e5f69e0c692eb29f65a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01200.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6b4ac8ab7f43b84835b7541b6b6dce7a2b92f1405aab44eb1d690a0869f39409 -size 644818 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01216.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01216.webp deleted file mode 100644 index 664c99e3aa7d0cd58de0c4ac5fea471abf5f0569..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01216.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1299b850e4f0b59f7bc7459b1fee5baf3443da63e4084141580ca1caac2e6e45 -size 649632 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01241.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01241.webp deleted file mode 100644 index f251121871240d45283b168010320f7e13f52b61..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01241.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a29a8a41d1f42f4bdc85c10f511faf56b367188a4692e8c98498e37e77d56cee -size 629122 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01257.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01257.webp deleted file mode 100644 index a1fa4c9e99d58f07c1444d399adeae1dfb1376a5..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01257.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4e5c0918de6c64895b4fa78c5681ef569270f4aeaa0d36f08d04aac210d4f0c2 -size 634538 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01282.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01282.webp deleted file mode 100644 index b283553e5f27dd21553fa324a04d94175847ba4f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01282.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:38a6ea9ee4bc606c0755b257f659c02eddf2a7c9524c1d9946092136825ae2c8 -size 606566 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01294.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01294.webp deleted file mode 100644 index 865b308997057e39891b652d9c8349b9ac449f9f..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01294.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f44f6c739864e2a2db7b5a3bfa7d35f2f2fcaaf3bbe01150d5d7a57c367a1c83 -size 616248 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01304.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01304.webp deleted file mode 100644 index e1cd34b5f85fd3e35d170e3c24c00c7911e62d35..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01304.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:32e4670901d4ed4ace41df2613c7335b436731c0055cf09456eeca9b41dd7f8a -size 639614 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01312.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01312.webp deleted file mode 100644 index 5df9e2b4d70cbfdb6d8346307d19eb0a1640bc7b..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01312.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d3ee639a2dc11dcb0c2f7a44872b66edd17b0ccdda50e3edd3a9cfff568da2e2 -size 644022 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01328.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01328.webp deleted file mode 100644 index 6eae940c93d968801b02cd9b11ae29b9a6c8ffce..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01328.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d3e67ab4fb8b1ecb3f6fee0c8b02f185849d4d3af2a0dbc3921ab7d9d52e3422 -size 647802 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01345.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01345.webp deleted file mode 100644 index 2b1fc90cb5ed1b83445b4e254f1bd7c4a753c0da..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01345.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:08c51ca9a9da3bea5f04c0682055969a8c59e241bbe61b45f36ccf5b11fa0cbb -size 640302 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01353.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01353.webp deleted file mode 100644 index ce0f89e88e5eb4acbd350450cfed9ca15aeb8d46..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01353.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:465d9e4c019120a9d7d1f07b346db74606b418eb0ae67407816b7e3323152378 -size 634308 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01369.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01369.webp deleted file mode 100644 index cbf4fa410dbda5e80788dbbc35d8b6c07ab51750..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01369.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:b143ece8174feccbb3a221ecc4be269935d7a3a89fa4709e8b5ac925e920d51d -size 634836 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01386.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01386.webp deleted file mode 100644 index ed0edd2bdff62ad7d2a8157e4abc39171a7b469e..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01386.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2f8be67fca44029212a5b5fddc109254c617a21f311ac8e3a248230aacbb605b -size 631022 diff --git 
a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01390.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01390.webp deleted file mode 100644 index 8f8607a66d7180786da9e7c676218657bd1a9ac4..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01390.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:897d30347b7d43931d501c9930467df94de87b168350e432f3844c5b0b24c669 -size 646258 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01406.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01406.webp deleted file mode 100644 index c8ae05546d8ba894d5aa1bfef3fd3b1a02894c3a..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01406.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ba32dfdfee1357793e41a049fbb603b3cac8a11938f2570a6275d4da8fe5e5ff -size 618906 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01410.webp b/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01410.webp deleted file mode 100644 index bd68c4942fcccb6ea4da021465aedf6805648b75..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/front_one_piece_dress_nodded_cut/frames/01410.webp +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:0cda28b8e100f8e9e67714b81d7206b44732a1073ba75055838a4fb1e6b7c258 -size 609438 diff --git a/stf/works/preprocess/nasilhong_f_v1_front/metadata.json b/stf/works/preprocess/nasilhong_f_v1_front/metadata.json deleted file mode 100644 index 87592de56e2223871951953f1f45072da2f0c7b0..0000000000000000000000000000000000000000 --- a/stf/works/preprocess/nasilhong_f_v1_front/metadata.json +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:06c2997fe0bba05ca3922fd09d0ce435fb175d94de722d4027a6f82bdfeb6946 -size 42